diff --git a/swh/web/tests/browse/views/data/content_test_data.py b/swh/web/tests/browse/views/data/content_test_data.py
deleted file mode 100644
index 75a6fc75..00000000
--- a/swh/web/tests/browse/views/data/content_test_data.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# Copyright (C) 2017-2019  The Software Heritage developers
-# See the AUTHORS file at the top-level directory of this distribution
-# License: GNU Affero General Public License version 3, or any later version
-# See top-level LICENSE file for more information
-
-# flake8: noqa
-
-import os
-
-stub_content_root_dir = '08e8329257dad3a3ef7adea48aa6e576cd82de5b'
-
-stub_content_text_file = \
-"""
-/* This file is part of the KDE project
- *
- *  This library is free software; you can redistribute it and/or
- *  modify it under the terms of the GNU Library General Public
- *  License as published by the Free Software Foundation; either
- *  version 2 of the License, or (at your option) any later version.
- *
- *  This library is distributed in the hope that it will be useful,
- *  but WITHOUT ANY WARRANTY; without even the implied warranty of
- *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- *  Library General Public License for more details.
- *
- *  You should have received a copy of the GNU Library General Public License
- *  along with this library; see the file COPYING.LIB.  If not, write to
- *  the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
- *  Boston, MA 02110-1301, USA.
- */
-
-#ifndef KATE_SESSION_TEST_H
-#define KATE_SESSION_TEST_H
-
-#include <QObject>
-
-class KateSessionTest : public QObject
-{
-    Q_OBJECT
-
-private Q_SLOTS:
-    void init();
-    void cleanup();
-    void initTestCase();
-    void cleanupTestCase();
-
-    void create();
-    void createAnonymous();
-    void createAnonymousFrom();
-    void createFrom();
-    void documents();
-    void setFile();
-    void setName();
-    void timestamp();
-
-private:
-    class QTemporaryFile *m_tmpfile;
-};
-
-#endif
-"""
-
-stub_content_text_data = {
-    'checksums': {
-        'sha1': '5ecd9f37b7a2d2e9980d201acd6286116f2ba1f1',
-        'sha1_git': '537b47f68469c1c916c1bfbc072599133bfcbb21',
-        'sha256': 'b3057544f04e5821ab0e2a007e2ceabd7de2dfb1d42a764f1de8d0d2eff80006',
-        'blake2s256': '25117fa9f124d5b771a0a7dfca9c7a57247d81f8343334b4b41c782c7f7ed64d'
-    },
-    'length': 1317,
-    'raw_data': str.encode(stub_content_text_file),
-    'mimetype': 'text/x-c++',
-    'encoding': 'us-ascii',
-    'language': 'c++',
-    'licenses': 'GPL',
-    'error_code': 200,
-    'error_message': '',
-    'error_description': ''
-}
-
-stub_content_text_path = 'kate/autotests/session_test.h'
-
-
diff --git a/swh/web/tests/browse/views/data/origin_test_data.py b/swh/web/tests/browse/views/data/origin_test_data.py
index 069d5500..2a87043c 100644
--- a/swh/web/tests/browse/views/data/origin_test_data.py
+++ b/swh/web/tests/browse/views/data/origin_test_data.py
@@ -1,692 +1,170 @@
 # Copyright (C) 2017-2018  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 # flake8: noqa
 
-origin_info_test_data = {
-    'id': 2,
-    'type': 'git',
-    'url': 'https://github.com/torvalds/linux'
-}
-
-origin_visits_test_data = [
- {'date': '2015-07-09T21:09:24+00:00',
-  'metadata': {},
-  'origin': 2,
-  'snapshot': '62841f16e8592344b51afc272b98e98108f0b5c5',
-  'status': 'full',
-  'visit': 1},
- {'date': '2016-02-23T18:05:23.312045+00:00',
-  'metadata': {},
-  'origin': 2,
-  'snapshot': '26befdbf4b393d1e03aa80f2a955bc38b241a8ac',
-  'status': 'full',
-  'visit': 2},
- {'date': '2016-03-28T01:35:06.554111+00:00',
-  'metadata': {},
-  'origin': 2,
-  'snapshot': 'a07fe7f5bfacf1db47450f04340c7a7b45d3da74',
-  'status': 'full',
-  'visit': 3},
- {'date': '2016-06-18T01:22:24.808485+00:00',
-  'metadata': {},
-  'origin': 2,
-  'snapshot': 'ce21f317d9fd74bb4af31b06207240031f4b2516',
-  'status': 'full',
-  'visit': 4},
- {'date': '2016-08-14T12:10:00.536702+00:00',
-  'metadata': {},
-  'origin': 2,
-  'snapshot': 'fe0eac19141fdcdf039e8f5ace5e41b9a2398a49',
-  'status': 'full',
-  'visit': 5},
- {'date': '2016-08-17T09:16:22.052065+00:00',
-  'metadata': {},
-  'origin': 2,
-  'snapshot': '6903f868df6d94a444818b50becd4835b29be274',
-  'status': 'full',
-  'visit': 6},
- {'date': '2016-08-29T18:55:54.153721+00:00',
-  'metadata': {},
-  'origin': 2,
-  'snapshot': '6bd66993839dc897aa15a443c4e3b9164f811499',
-  'status': 'full',
-  'visit': 7},
- {'date': '2016-09-07T08:44:47.861875+00:00',
-  'metadata': {},
-  'origin': 2,
-  'snapshot': 'c06a965f855f4d73c84fbefd859f7df507187d9c',
-  'status': 'full',
-  'visit': 8},
- {'date': '2016-09-14T10:36:21.505296+00:00',
-  'metadata': {},
-  'origin': 2,
-  'snapshot': '40a5381e2b6c0c04775c5b7e7b37284c3affc129',
-  'status': 'full',
-  'visit': 9},
- {'date': '2016-09-23T10:14:02.169862+00:00',
-  'metadata': {},
-  'origin': 2,
-  'snapshot': '2252b4d49b9e786eb777a0097a42e51c7193bb9c',
-  'status': 'full',
-  'visit': 10}
-]
-
-stub_origin_info = {
-    'id': 7416001,
-    'type': 'git',
-    'url': 'https://github.com/webpack/webpack'
-}
-stub_visit_id = 10
-stub_visit_unix_ts = 1493909263
-stub_visit_iso_date = '2017-05-04T14:47:43+00:00'
-
 stub_origin_visits = [
  {'date': '2015-08-05T18:55:20.899865+00:00',
   'metadata': {},
   'origin': 7416001,
   'status': 'full',
   'snapshot': '23fac03bbf6f4d1037bc1477a85bc1c71e586f98',
   'visit': 1},
  {'date': '2016-03-06T12:16:26.240919+00:00',
   'metadata': {},
   'origin': 7416001,
   'status': 'full',
   'snapshot': 'c71048f1d29a4889ef79f4a64e3c144efe83ea66',
   'visit': 2},
  {'date': '2016-03-21T11:40:10.329221+00:00',
   'metadata': {},
   'origin': 7416001,
   'status': 'full',
   'snapshot': '0d83f0dae76581e55b31ca96d3574261754f1f8f',
   'visit': 3},
  {'date': '2016-03-29T08:05:17.602649+00:00',
   'metadata': {},
   'origin': 7416001,
   'status': 'full',
   'snapshot': 'eeb186a965a6df47327f34997ee164be66340046',
   'visit': 4},
  {'date': '2016-07-26T20:11:03.827577+00:00',
   'metadata': {},
   'origin': 7416001,
   'status': 'full',
   'snapshot': '1bf4bddbcf9be09ffeeaa68a85b53f039b2d32c2',
   'visit': 5},
  {'date': '2016-08-13T04:10:22.142897+00:00',
   'metadata': {},
   'origin': 7416001,
   'status': 'full',
   'snapshot': '57cfa801c5cba9b034f994c119e122fb153da3ec',
   'visit': 6},
  {'date': '2016-08-16T22:57:46.201737+00:00',
   'metadata': {},
   'origin': 7416001,
   'status': 'full',
   'snapshot': 'd0c85af82c4c3abb2024c5c628f3e4b584c8b0ef',
   'visit': 7},
  {'date': '2016-08-17T17:58:43.346437+00:00',
   'metadata': {},
   'origin': 7416001,
   'status': 'full',
   'snapshot': '6ba2ff728eed2777156fd5c89424a2a46609f334',
   'visit': 8},
  {'date': '2016-08-29T23:29:09.445945+00:00',
   'metadata': {},
   'origin': 7416001,
   'status': 'full',
   'snapshot': 'adb6d6adf04454f2b8acd6bf3c89d82dd84c3eed',
   'visit': 9},
  {'date': '2016-09-07T13:49:15.096109+00:00',
   'metadata': {},
   'origin': 7416001,
   'status': 'full',
   'snapshot': '8e29ad8af5f8a9bac86d26f48f956cc0ec69bcd9',
   'visit': 10},
  {'date': '2016-09-14T15:01:09.017257+00:00',
   'metadata': {},
   'origin': 7416001,
   'status': 'full',
   'snapshot': '78fbd0992f12cf1694257b2495e12bd2a3971643',
   'visit': 11},
  {'date': '2016-09-23T12:29:15.921727+00:00',
   'metadata': {},
   'origin': 7416001,
   'status': 'full',
   'snapshot': '4fa28005f67b46f285bebe7228fe0a96a287ad94',
   'visit': 12},
  {'date': '2017-02-16T07:44:23.302439+00:00',
   'metadata': {},
   'origin': 7416001,
   'status': 'partial',
   'snapshot': None,
   'visit': 13},
  {'date': '2017-05-04T14:47:43.228455+00:00',
   'metadata': {},
   'origin': 7416001,
   'status': 'full',
   'snapshot': 'ea21a9304f34a5b646f81994bd53d580de917427',
   'visit': 14}
 ]
 
 stub_origin_snapshot = (
 [
  {'directory': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
   'name': 'HEAD',
   'revision': '7bc08e1aa0b08cb23e18715a32aa38517ad34672',
   'date': '04 May 2017, 13:27 UTC',
   'message': 'Merge pull request #4816 from webpack/bugfix/hoist-immutable-export'},
  {'directory': 'c47a824f95109ca7cafdd1c3206332a0d10df55d',
   'name': 'refs/heads/0.10',
   'revision': 'f944553c77254732c4ce22c0add32aa1f641959d',
   'date': '19 June 2013, 12:46 UTC',
   'message': 'webpack 0.10'},
  {'directory': '45e31184ebb7699cd74175145c7eb11cce3f085e',
   'name': 'refs/heads/0.11',
   'revision': '0a29109a6e4579926ebc9b03a6301c61861cce62',
   'date': '31 December 2013, 12:43 UTC',
   'message': '0.11.18'},
  {'directory': '42346b33e2d16019490c273ff586ee88817327b3',
   'name': 'refs/heads/0.8',
   'revision': 'e42701dc6f9b035bfbb5d0fffded905d8b456db4',
   'date': 'e42701dc6f9b035bfbb5d0fffded905d8b456db4',
   'message': 'fixes #54'},
  {'directory': '828c7e9385523f852f8d4dac3cb241e319a9ce61',
   'name': 'refs/heads/0.9',
   'revision': '6c3f51e6d9491a2463ad099a2ca49255ec83ff00',
   'date': '19 March 2013, 07:56 UTC',
   'message': 'updated some small things on the cli'},
  {'directory': '2c50e78d63bdc4441c8d2691f5729b04f0ab3ecd',
   'name': 'refs/heads/1.0',
   'revision': 'fb7958d172e1ef6fb77f23bf56818ad24e896e5c',
   'date': '03 March 2014, 14:37 UTC',
   'message': 'Merge pull request #188 from polotek/patch-1'},
  {'directory': '31a3355c4d0a464aa311c5fa11c7f8b20aede6b4',
   'name': 'refs/heads/IgnorePluginHotfix',
   'revision': 'fdc922a2fa007e71b7ec07252012ffab9a178d4a',
   'date': '08 April 2017, 15:50 UTC',
   'message': 'add tests for ignored context modules'},
  {'directory': 'e566db1fc65cb61b3799c6e0f0ad06b2406f095f',
   'name': 'refs/heads/beta',
   'revision': '40428853da5d9ce6a8751e13b5e54145337b6a7e',
   'date': '04 May 2017, 13:35 UTC',
   'message': 'Merge remote-tracking branch \'origin/perf/chunks-set\' into beta'}
 ],
 [{'name': 'v2.1.0-beta.6',
   'branch_name': 'refs/tags/v2.1.0-beta.6',
   'message': '2.1.0-beta.6',
   'date': '22 April 2016, 01:03 UTC',
   'id': 'ae2e1a30e4f2ac701e8a6e2fe85a5f200d7e597a',
   'target_type': 'revision',
   'target': 'ca8b693c2c17bd06778476381fae23b3b21c0475',
   'directory': '4e1f9b3c2f5c4bd205051a14af4ade62349ee57a'},
  {'name': 'v2.1.0-beta.7',
   'branch_name': 'refs/tags/v2.1.0-beta.7',
   'message': '2.1.0-beta.7',
   'date': '07 May 2016, 00:00 UTC',
   'id': '46e94bbdc9e54cf6273a985732446b4c963bf1aa',
   'target_type': 'revision',
   'target': '9162f9e6eea62137139f95b8aaedee335c870edd',
   'directory': '713763f90f17371fec714c1660f229ba41b9f5e2'},
  {'name': 'v2.1.0-beta.8',
   'branch_name': 'refs/tags/v2.1.0-beta.8',
   'message': '2.1.0-beta.8',
   'date': '29 May 2016, 20:53 UTC',
   'id': '910ada6bf809f8f1c318e098f67f2c0b3c80c888',
   'target_type': 'revision',
   'target': 'abf0cefd592700a19856c3ef9b6d65f905ec73c1',
   'directory': 'd6a069fda992759670851dc38500b2e8dccdc595'},
  {'name': 'v2.1.0-beta.9',
   'branch_name': 'refs/tags/v2.1.0-beta.9',
   'message': '2.1.0-beta.9',
   'date': '04 June 2016, 20:19 UTC',
   'id': '63063663c86b0c7e5886adbd3c22aacba9b957b0',
   'target_type': 'revision',
   'target': 'dc3bd055027d8d1ebbb0ebdd07fb73387a0ab6d1',
   'directory': '467251807aea6ba83719194e9a1d65e8053f14e0'}
-])
-
-stub_origin_master_branch = 'HEAD'
-
-stub_origin_root_directory_sha1 = 'ae59ceecf46367e8e4ad800e231fc76adc3afffb'
-
-stub_origin_root_directory_entries = [
- {'checksums': {'sha1': '1a17dd2c8245559b43a90aa7c084572e917effff',
-                'sha1_git': '012966bd94e648f23b53e71a3f9918e28abc5d81',
-                'sha256': 'd65ab1f8cdb323e2b568a8e99814b1b986a38beed85a380981b383c0feb93525'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 394,
-  'name': '.editorconfig',
-  'perms': 33188,
-  'status': 'visible',
-  'target': '012966bd94e648f23b53e71a3f9918e28abc5d81',
-  'type': 'file'},
- {'checksums': {'sha1': '2e727ec452dc592ae6038d3e09cd35d83d7ea265',
-                'sha1_git': '291a4e25598633cd7c286ad8d6cbe9eee5a6291a',
-                'sha256': 'd5951c8b796288e0dae1da50575d1b8619462a8df2272cd250146872a1fe804a'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 1839,
-  'name': '.eslintrc.js',
-  'perms': 33188,
-  'status': 'visible',
-  'target': '291a4e25598633cd7c286ad8d6cbe9eee5a6291a',
-  'type': 'file'},
- {'checksums': {'sha1': '5c59880c0576b2789ec126b61b09fad7a982763b',
-                'sha1_git': 'ac579eb7bc04ba44fe84f3c8d1082573e9f4f514',
-                'sha256': '8a59a61ff6c0f568a8f76bab434baf3318c80a75ef6fb1b6eb861a0c97518de0'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 67,
-  'name': '.gitattributes',
-  'perms': 33188,
-  'status': 'visible',
-  'target': 'ac579eb7bc04ba44fe84f3c8d1082573e9f4f514',
-  'type': 'file'},
- {'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': None,
-  'name': '.github',
-  'perms': 16384,
-  'target': '93bdcf98e9c05307b39a9d9e00e48cda6dbd036c',
-  'type': 'dir'},
- {'checksums': {'sha1': '7e1008eee2a373f0db7746d0416856aec6b95c22',
-                'sha1_git': '84bc35a3abab38bdf87a8f32cc82ce9c136d331e',
-                'sha256': '7de369f1d26bc34c7b6329de78973db07e341320eace6a8704a65d4c5bf5993f'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 167,
-  'name': '.gitignore',
-  'perms': 33188,
-  'status': 'visible',
-  'target': '84bc35a3abab38bdf87a8f32cc82ce9c136d331e',
-  'type': 'file'},
- {'checksums': {'sha1': '06d96508b7d343ff42868f9b6406864517935da7',
-                'sha1_git': '79b049846744a2da3eb1c4ac3b01543f2bdca44a',
-                'sha256': '697733061d96dd2e061df04dcd86392bb792e2dbe5725a6cb14a436d3c8b76f1'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 706,
-  'name': '.jsbeautifyrc',
-  'perms': 33188,
-  'status': 'visible',
-  'target': '79b049846744a2da3eb1c4ac3b01543f2bdca44a',
-  'type': 'file'},
- {'checksums': {'sha1': '8041a4a66f46e615c99a850700850a8bd1079dce',
-                'sha1_git': '90e4f1ef5beb167891b2e029da6eb9b14ab17add',
-                'sha256': '3d6a76a57351b9e3acc5843ff2127dc2cf70c023133312143f86ee74ba9ef6d3'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 1059,
-  'name': '.travis.yml',
-  'perms': 33188,
-  'status': 'visible',
-  'target': '90e4f1ef5beb167891b2e029da6eb9b14ab17add',
-  'type': 'file'},
- {'checksums': {'sha1': 'cd52973e43c6f4294e8cdfd3106df602b9993f20',
-                'sha1_git': 'e5279ebcecd87445648d003c36e6abfebed0ed73',
-                'sha256': '130672b16dff61b1541b6d26c2e568ac11830a31d04faace1583d3ad4a38720e'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 2058,
-  'name': 'CONTRIBUTING.md',
-  'perms': 33188,
-  'status': 'visible',
-  'target': 'e5279ebcecd87445648d003c36e6abfebed0ed73',
-  'type': 'file'},
- {'checksums': {'sha1': '3bebb9ba92e45dd02a0512e144f6a46b14a9b8ab',
-                'sha1_git': '8c11fc7289b75463fe07534fcc8224e333feb7ff',
-                'sha256': '9068a8782d2fb4c6e432cfa25334efa56f722822180570802bf86e71b6003b1e'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 1071,
-  'name': 'LICENSE',
-  'perms': 33188,
-  'status': 'visible',
-  'target': '8c11fc7289b75463fe07534fcc8224e333feb7ff',
-  'type': 'file'},
- {'checksums': {'sha1': '6892825420196e84c7104a7ff71ec75db20a1fca',
-                'sha1_git': '8f96a0a6d3bfe7183765938483585f3981151553',
-                'sha256': 'b0170cfc28f56ca718b43ab086ca5428f853268687c8c033b4fbf028c66d663e'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 46700,
-  'name': 'README.md',
-  'perms': 33188,
-  'status': 'visible',
-  'target': '8f96a0a6d3bfe7183765938483585f3981151553',
-  'type': 'file'},
- {'checksums': {'sha1': '9bc4902b282f9f1c9f8f885a6947f3bf0f6e6e5f',
-                'sha1_git': 'dd6912c8fc97eff255d64da84cfd9837ebf0a05a',
-                'sha256': 'e06dbc101195ec7ea0b9aa236be4bdc03784a01f64d6e11846ce3a3f6e1080c6'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 590,
-  'name': 'appveyor.yml',
-  'perms': 33188,
-  'status': 'visible',
-  'target': 'dd6912c8fc97eff255d64da84cfd9837ebf0a05a',
-  'type': 'file'},
- {'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': None,
-  'name': 'benchmark',
-  'perms': 16384,
-  'target': '6bd2996b76e051982aa86499a2b485594e607fe3',
-  'type': 'dir'},
- {'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': None,
-  'name': 'bin',
-  'perms': 16384,
-  'target': '681da97ea1ce9a2bd29e3e72781d80e8b961cd51',
-  'type': 'dir'},
- {'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': None,
-  'name': 'buildin',
-  'perms': 16384,
-  'target': '35cfb25d1b3a4063bf04a43f9cbb7e1e87703708',
-  'type': 'dir'},
- {'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': None,
-  'name': 'ci',
-  'perms': 16384,
-  'target': 'efccd3ce0a0304c8cbcffcfdfcafcf1e598819b8',
-  'type': 'dir'},
- {'checksums': {'sha1': '9eb3d0e3711f68f82d29785e64ebff2c0d7cec7a',
-                'sha1_git': '1ecf877e445bcf865ef53cfcecadda7e9691aace',
-                'sha256': '2007e0883c2784bb82584a10d53a0f0c36286dd913741bfd5e4d22b812db529c'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 529,
-  'name': 'circle.yml',
-  'perms': 33188,
-  'status': 'visible',
-  'target': '1ecf877e445bcf865ef53cfcecadda7e9691aace',
-  'type': 'file'},
- {'checksums': {'sha1': '63209428718e101492c3bb91509f1b4e319b0d7d',
-                'sha1_git': 'b3fa4e6abe22977e6267e9969a593e790bf2cd36',
-                'sha256': '5d14c8d70215f46a9722d29c7ebff8cc9bd24509650d7ee601fd461e52a52f7f'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 254,
-  'name': 'codecov.yml',
-  'perms': 33188,
-  'status': 'visible',
-  'target': 'b3fa4e6abe22977e6267e9969a593e790bf2cd36',
-  'type': 'file'},
- {'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': None,
-  'name': 'examples',
-  'perms': 16384,
-  'target': '7e3ac01795317fbc36a031a9117e7963d6c7da90',
-  'type': 'dir'},
- {'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': None,
-  'name': 'hot',
-  'perms': 16384,
-  'target': 'a5eea6ca952fba9f7ae4177627ed5e22754df9f5',
-  'type': 'dir'},
- {'checksums': {'sha1': '92d9367db4ba049f698f5bf78b6946b8e2d91345',
-                'sha1_git': 'eaa9cc4a247b01d6a9c0adc91997fefe6a62be1f',
-                'sha256': 'd4b42fa0651cf3d99dea0ca5bd6ba64cc21e80be7d9ea05b2b4423ef8f16ec36'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 19,
-  'name': 'input.js',
-  'perms': 33188,
-  'status': 'visible',
-  'target': 'eaa9cc4a247b01d6a9c0adc91997fefe6a62be1f',
-  'type': 'file'},
- {'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': None,
-  'name': 'lib',
-  'perms': 16384,
-  'target': '187d40104aa21475d8af88ccd77fc582cf6ac7a6',
-  'type': 'dir'},
- {'checksums': {'sha1': 'f17ffa2dc14262292e2275efa3730a96fe060c44',
-                'sha1_git': 'd55b7110929cbba3d94da01494a272b39878ac0f',
-                'sha256': '012d4446ef8ab6656251b1b7f8e0217a5666ec04ad952e8a617b70946de17166'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 9132,
-  'name': 'open-bot.yaml',
-  'perms': 33188,
-  'status': 'visible',
-  'target': 'd55b7110929cbba3d94da01494a272b39878ac0f',
-  'type': 'file'},
- {'checksums': {'sha1': '3a6638e72fcc2499f1a4c9b46d4d00d239bbe1c8',
-                'sha1_git': '6d1aa82c90ecd184d136151eb81d240e1fea723e',
-                'sha256': '00faf7dde1eb0742f3ca567af4dbcd8c01a38cf30d8faa7f0208f46dbc6b5201'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 4034,
-  'name': 'package.json',
-  'perms': 33188,
-  'status': 'visible',
-  'target': '6d1aa82c90ecd184d136151eb81d240e1fea723e',
-  'type': 'file'},
- {'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': None,
-  'name': 'schemas',
-  'perms': 16384,
-  'target': 'f1f89c389f73c29e7a5d1a0ce5f9e0f166857815',
-  'type': 'dir'},
- {'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': None,
-  'name': 'test',
-  'perms': 16384,
-  'target': '318c279189d186a1e06653fc5c78c539878c4d7d',
-  'type': 'dir'},
- {'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': None,
-  'name': 'web_modules',
-  'perms': 16384,
-  'target': '93a5cc8e492d0b0323386814a72536381019ef7b',
-  'type': 'dir'},
- {'checksums': {'sha1': '8047389fcc8e286ceed5536c677c2e803032cf84',
-                'sha1_git': 'eb8509f70158c231a3fd864aecf2649590bbedf3',
-                'sha256': '8cbe1ce94349ac3bc6cbcc952efd45d838c6b4524af8a773b18e1ebe8b4f936b'},
-  'dir_id': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
-  'length': 141192,
-  'name': 'yarn.lock',
-  'perms': 33188,
-  'status': 'visible',
-  'target': 'eb8509f70158c231a3fd864aecf2649590bbedf3',
-  'type': 'file'},
-  {'dir_id': '3b2cbf1a58e7d2848049cf8f1dc1f9ff22415877',
-   'length': None,
-   'name': 'revision',
-   'perms': 57344,
-   'target': '5da5c4eece39ffb3ae824da433f7e4d004fba217',
-   'target_url': '/api/1/revision/5da5c4eece39ffb3ae824da433f7e4d004fba217/',
-   'type': 'rev'},
-]
-
-stub_origin_sub_directory_path = 'lib/webworker'
-
-stub_origin_sub_directory_entries = [
- {'checksums': {'sha1': '7bf366cd9f4a9835c73aafb70e44f640bab7ad16',
-                'sha1_git': '870252b7a175ee5ec2edfe2c22b2d56aa04bece4',
-                'sha256': 'e0af438932627dd9d53b36bfe69c3dbad6dc4d4569f6cdb29d606c9df2b128fa'},
-  'dir_id': '02b626051e0935ecd28f50337f452db76803f980',
-  'length': 921,
-  'name': 'WebWorkerChunkTemplatePlugin.js',
-  'perms': 33188,
-  'status': 'visible',
-  'target': '870252b7a175ee5ec2edfe2c22b2d56aa04bece4',
-  'type': 'file'},
- {'checksums': {'sha1': 'e2862b2787702bd3eb856f73627d5d8df5a8b550',
-                'sha1_git': 'b3e90d26a68ad9da0a7cc97a262db585fa4c73ba',
-                'sha256': '1c254e76248ff5ec7e2185cdb1cfd2e0338087244d2d617a868c346317b7646b'},
-  'dir_id': '02b626051e0935ecd28f50337f452db76803f980',
-  'length': 1039,
-  'name': 'WebWorkerHotUpdateChunkTemplatePlugin.js',
-  'perms': 33188,
-  'status': 'visible',
-  'target': 'b3e90d26a68ad9da0a7cc97a262db585fa4c73ba',
-  'type': 'file'},
- {'checksums': {'sha1': 'a1e04061d3e50bb8c024b07e9464da7392f37bf1',
-                'sha1_git': '1e503e028fdd5322c9f7d8ec50f54006cacf334e',
-                'sha256': '72dea06510d1a4435346f8dca20d8898a394c52c7382a97bd73d1840e31f90b3'},
-  'dir_id': '02b626051e0935ecd28f50337f452db76803f980',
-  'length': 1888,
-  'name': 'WebWorkerMainTemplate.runtime.js',
-  'perms': 33188,
-  'status': 'visible',
-  'target': '1e503e028fdd5322c9f7d8ec50f54006cacf334e',
-  'type': 'file'},
- {'checksums': {'sha1': 'b95c16e90784cf7025352839133b482149526da0',
-                'sha1_git': '46c9fe382d606ce19e556deeae6a23af47a8027d',
-                'sha256': 'c78c7ca9ee0aa341f843a431ef27c75c386607be3037d44ff530bfe3218edb3c'},
-  'dir_id': '02b626051e0935ecd28f50337f452db76803f980',
-  'length': 4051,
-  'name': 'WebWorkerMainTemplatePlugin.js',
-  'perms': 33188,
-  'status': 'visible',
-  'target': '46c9fe382d606ce19e556deeae6a23af47a8027d',
-  'type': 'file'},
- {'checksums': {'sha1': 'ec9df36b1e8dd689d84dbeeeb9f45fe9f9d96605',
-                'sha1_git': 'd850018bb0d2ad41dd0ae9e5c887dff8a23601e9',
-                'sha256': 'f995f6a13511955244850c2344c6cef09c10ab24c49f8448544e2b34aa69d03c'},
-  'dir_id': '02b626051e0935ecd28f50337f452db76803f980',
-  'length': 763,
-  'name': 'WebWorkerTemplatePlugin.js',
-  'perms': 33188,
-  'status': 'visible',
-  'target': 'd850018bb0d2ad41dd0ae9e5c887dff8a23601e9',
-  'type': 'file'},
-  {'dir_id': '3b2cbf1a58e7d2848049cf8f1dc1f9ff22415877',
-   'length': None,
-   'name': 'revision',
-   'perms': 57344,
-   'target': '5da5c4eece39ffb3ae824da433f7e4d004fba217',
-   'target_url': '/api/1/revision/5da5c4eece39ffb3ae824da433f7e4d004fba217/',
-   'type': 'rev'}
-]
-
-stub_content_origin_info = {
-    'id': 10357753,
-    'type': 'git',
-    'url': 'https://github.com/KDE/kate'
-}
-
-stub_content_origin_visit_id = 10
-stub_content_origin_visit_unix_ts = 1471457439
-stub_content_origin_visit_iso_date = '2016-08-17T18:10:39+00'
-
-stub_content_origin_branch = 'HEAD'
-
-stub_content_origin_visits = [
- {'date': '2015-09-26T09:30:52.373449+00:00',
-  'metadata': {},
-  'origin': 10357753,
-  'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
-  'status': 'full',
-  'visit': 1},
- {'date': '2016-03-10T05:36:11.118989+00:00',
-  'metadata': {},
-  'origin': 10357753,
-  'snapshot': '2ab1ee17cbaf6fd477832ace039ad85ade748e70',
-  'status': 'full',
-  'visit': 2},
- {'date': '2016-03-24T07:39:29.727793+00:00',
-  'metadata': {},
-  'origin': 10357753,
-  'snapshot': 'e8f19fe946c251fd69989dabe66a9d1b2cba00f6',
-  'status': 'full',
-  'visit': 3},
- {'date': '2016-03-31T22:55:31.402863+00:00',
-  'metadata': {},
-  'origin': 10357753,
-  'snapshot': '34a10743dca51484098931a6cf6933582013b458',
-  'status': 'full',
-  'visit': 4},
- {'date': '2016-05-26T06:25:54.879676+00:00',
-  'metadata': {},
-  'origin': 10357753,
-  'snapshot': 'd8c98ebdf07b2b6542bd74501334b4760b223f9d',
-  'status': 'full',
-  'visit': 5},
- {'date': '2016-06-07T17:16:33.964164+00:00',
-  'metadata': {},
-  'origin': 10357753,
-  'snapshot': '6d8747764f926c8608be3c37f3fe2e516faf5bf2',
-  'status': 'full',
-  'visit': 6},
- {'date': '2016-07-27T01:38:20.345358+00:00',
-  'metadata': {},
-  'origin': 10357753,
-  'snapshot': '9fed7618f1b022bcca931c6d29db57b18d843b07',
-  'status': 'full',
-  'visit': 7},
- {'date': '2016-08-13T04:46:45.987508+00:00',
-  'metadata': {},
-  'origin': 10357753,
-  'snapshot': '4d7cfa75c52152122050914b88ef07a63a8dad9d',
-  'status': 'full',
-  'visit': 8},
- {'date': '2016-08-16T23:24:13.214496+00:00',
-  'metadata': {},
-  'origin': 10357753,
-  'snapshot': 'bfa9790e0bfad52322acf4d348b97bbc5534db8b',
-  'status': 'full',
-  'visit': 9},
- {'date': '2016-08-17T18:10:39.841005+00:00',
-  'metadata': {},
-  'origin': 10357753,
-  'snapshot': 'bfa9790e0bfad52322acf4d348b97bbc5534db8b',
-  'status': 'full',
-  'visit': 10}
-]
-
-stub_content_origin_snapshot = (
-[
- {'directory': '08e8329257dad3a3ef7adea48aa6e576cd82de5b',
-  'name': 'HEAD',
-  'revision': '11f15b0789344427ddf17b8d75f38577c4395ce0',
-  'date': '02 May 2017, 05:33 UTC',
-  'message': 'GIT_SILENT made messages (after extraction)'},
- {'directory': '2371baf0411e3adf12d65daf86c3b135633dd5e4',
-  'name': 'refs/heads/Applications/14.12',
-  'revision': '5b27ad32f8c8da9b6fc898186d59079488fb74c9',
-  'date': '23 February 2015, 12:10 UTC',
-  'message': 'SVN_SILENT made messages (.desktop file)'},
- {'directory': '5d024d33a218eeb164936301a2f89231d1f0854a',
-  'name': 'refs/heads/Applications/15.04',
-  'revision': '4f1e29120795ac643044991e91f24d02c9980202',
-  'date': '04 July 2015, 12:34 UTC',
-  'message': 'SVN_SILENT made messages (.desktop file)'},
- {'directory': 'f33984df50ec29dbbc86295adb81ebb831e3b86d',
-  'name': 'refs/heads/Applications/15.08',
-  'revision': '52722e588f46a32b480b5f304ba21480fc8234b1',
-  'date': '12 June 2016, 20:28 UTC',
-  'message': 'Correctly restore view config of all split views'},
- {'directory': 'e706b836cf32929a48b6f92c07766f237f9d068f',
-  'name': 'refs/heads/Applications/15.12',
-  'revision': '38c4e42c4a653453fc668c704bb8995ae31b5baf',
-  'date': '06 September 2016, 12:01 UTC',
-  'message': 'Fix crash in tab switcher plugin when using split views'},
- {'directory': 'ebf8ae783b44df5c827bfa46227e5dbe98f25eb4',
-  'name': 'refs/heads/Applications/16.04',
-  'revision': 'd0fce3b880ab37a551d75ec940137e0f46bf2143',
-  'date': '06 September 2016, 12:01 UTC',
-  'message': 'Fix crash in tab switcher plugin when using split views'}
-],
-[{'name': 'v4.9.90',
-  'branch_name': 'refs/tags/v4.9.90',
-  'message': 'KDE 4.9.90',
-  'date': '09 December 2012, 23:15 UTC',
-  'id': 'f6a3a31474a86023377ce6fa1cbec3d9ab809d06',
-  'target_type': 'revision',
-  'target': '4dd3d7de2f684fcdf27028bafdc022183e33610d',
-  'directory': 'a5b9c74c35732189b8aa7567f979f9ac36fdb8bc'},
- {'name': 'v4.9.95',
-  'branch_name': 'refs/tags/v4.9.95',
-  'message': 'KDE 4.9.95',
-  'date': '02 January 2013, 19:00 UTC',
-  'id': '74bab04b34b243269354f6e5530d6d0edf92f84d',
-  'target_type': 'revision',
-  'target': '6bd42579908cf62f094ebca0e100832208967428',
-  'directory': 'aaeba0a71293465b9026249381c0a1f13a13a43f'},
- {'name': 'v4.9.97',
-  'branch_name': 'refs/tags/v4.9.97',
-  'message': 'KDE 4.9.97',
-  'date': '05 January 2013, 20:34 UTC',
-  'id': 'd8bf93d6915c4ab17de882c443423f281c961a1c',
-  'target_type': 'revision',
-  'target': '5fbd023fc46ecc57a6772be2aa04f532e8426f43',
-  'directory': '0ce36caec34ad7c930f35eca907148208b2a3f2b'},
- {'name': 'v4.9.98',
-  'branch_name': 'refs/tags/v4.9.98',
-  'message': 'KDE 4.9.98',
-  'date': '21 January 2013, 19:36 UTC',
-  'id': '9bf0265d4fce650926bfd93b117584eb3fd0bd73',
-  'target_type': 'revision',
-  'target': '670aff3a940fecf6a085fe71a5bead2edcad8a55',
-  'directory': '0747fbcc783dfab9e857040287ed400df145079d'}
-])
+])
\ No newline at end of file
diff --git a/swh/web/tests/browse/views/test_content.py b/swh/web/tests/browse/views/test_content.py
index f88bf07a..47e3edd2 100644
--- a/swh/web/tests/browse/views/test_content.py
+++ b/swh/web/tests/browse/views/test_content.py
@@ -1,350 +1,350 @@
 # Copyright (C) 2017-2019  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 from unittest.mock import patch
 
 from django.utils.html import escape
 
 from hypothesis import given
 
 from swh.web.browse.utils import (
     get_mimetype_and_encoding_for_content, prepare_content_for_display,
     _reencode_content
 )
 from swh.web.common.exc import NotFoundExc
 from swh.web.common.utils import reverse, get_swh_persistent_id
 from swh.web.common.utils import gen_path_info
 from swh.web.tests.strategies import (
     content, content_text_non_utf8, content_text_no_highlight,
     content_image_type, content_text, invalid_sha1, unknown_content
 )
 from swh.web.tests.testcase import WebTestCase
 
 
 class SwhBrowseContentTest(WebTestCase):
 
     @given(content())
     def test_content_view_text(self, content):
 
         sha1_git = content['sha1_git']
 
         url = reverse('browse-content',
                       url_args={'query_string': content['sha1']},
                       query_params={'path': content['path']})
 
         url_raw = reverse('browse-content-raw',
                           url_args={'query_string': content['sha1']})
 
         resp = self.client.get(url)
 
         content_display = self._process_content_for_display(content)
         mimetype = content_display['mimetype']
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('browse/content.html')
 
         if mimetype.startswith('text/'):
             self.assertContains(resp, '<code class="%s">' %
                                       content_display['language'])
             self.assertContains(resp, escape(content_display['content_data']))
         self.assertContains(resp, url_raw)
 
         swh_cnt_id = get_swh_persistent_id('content', sha1_git)
         swh_cnt_id_url = reverse('browse-swh-id',
                                  url_args={'swh_id': swh_cnt_id})
         self.assertContains(resp, swh_cnt_id)
         self.assertContains(resp, swh_cnt_id_url)
 
     @given(content_text_no_highlight())
     def test_content_view_text_no_highlight(self, content):
 
         sha1_git = content['sha1_git']
 
         url = reverse('browse-content',
                       url_args={'query_string': content['sha1']})
 
         url_raw = reverse('browse-content-raw',
                           url_args={'query_string': content['sha1']})
 
         resp = self.client.get(url)
 
         content_display = self._process_content_for_display(content)
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('browse/content.html')
 
         self.assertContains(resp, '<code class="nohighlight">')
         self.assertContains(resp, escape(content_display['content_data'])) # noqa
         self.assertContains(resp, url_raw)
 
         swh_cnt_id = get_swh_persistent_id('content', sha1_git)
         swh_cnt_id_url = reverse('browse-swh-id',
                                  url_args={'swh_id': swh_cnt_id})
 
         self.assertContains(resp, swh_cnt_id)
         self.assertContains(resp, swh_cnt_id_url)
 
     @given(content_text_non_utf8())
     def test_content_view_no_utf8_text(self, content):
 
         sha1_git = content['sha1_git']
 
         url = reverse('browse-content',
                       url_args={'query_string': content['sha1']})
 
         resp = self.client.get(url)
 
         content_display = self._process_content_for_display(content)
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('browse/content.html')
         swh_cnt_id = get_swh_persistent_id('content', sha1_git)
         swh_cnt_id_url = reverse('browse-swh-id',
                                  url_args={'swh_id': swh_cnt_id})
         self.assertContains(resp, swh_cnt_id_url)
         self.assertContains(resp, escape(content_display['content_data']))
 
     @given(content_image_type())
     def test_content_view_image(self, content):
 
         url = reverse('browse-content',
                       url_args={'query_string': content['sha1']})
 
         url_raw = reverse('browse-content-raw',
                           url_args={'query_string': content['sha1']})
 
         resp = self.client.get(url)
 
         content_display = self._process_content_for_display(content)
         mimetype = content_display['mimetype']
         content_data = content_display['content_data']
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('browse/content.html')
 
         self.assertContains(resp, '<img src="data:%s;base64,%s"/>'
                                   % (mimetype, content_data.decode('utf-8')))
         self.assertContains(resp, url_raw)
 
     @given(content())
     def test_content_view_with_path(self, content):
 
         path = content['path']
 
         url = reverse('browse-content',
                       url_args={'query_string': content['sha1']},
                       query_params={'path': path})
 
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('browse/content.html')
 
         self.assertContains(resp, '<nav class="bread-crumbs')
 
         content_display = self._process_content_for_display(content)
         mimetype = content_display['mimetype']
 
         if mimetype.startswith('text/'):
-            hljs_language = content['hljs-language']
+            hljs_language = content['hljs_language']
             self.assertContains(resp, '<code class="%s">' % hljs_language)
             self.assertContains(resp, escape(content_display['content_data']))
 
         split_path = path.split('/')
 
         root_dir_sha1 = split_path[0]
         filename = split_path[-1]
         path = path.replace(root_dir_sha1 + '/', '').replace(filename, '')
 
         path_info = gen_path_info(path)
 
         root_dir_url = reverse('browse-directory',
                                url_args={'sha1_git': root_dir_sha1})
 
         self.assertContains(resp, '<li class="swh-path">',
                             count=len(path_info)+1)
 
         self.assertContains(resp, '<a href="' + root_dir_url + '">' +
                             root_dir_sha1[:7] + '</a>')
 
         for p in path_info:
             dir_url = reverse('browse-directory',
                               url_args={'sha1_git': root_dir_sha1,
                                         'path': p['path']})
             self.assertContains(resp, '<a href="' + dir_url + '">' +
                                 p['name'] + '</a>')
 
         self.assertContains(resp, '<li>' + filename + '</li>')
 
         url_raw = reverse('browse-content-raw',
                           url_args={'query_string': content['sha1']},
                           query_params={'filename': filename})
         self.assertContains(resp, url_raw)
 
         url = reverse('browse-content',
                       url_args={'query_string': content['sha1']},
                       query_params={'path': filename})
 
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('browse/content.html')
 
         self.assertNotContains(resp, '<nav class="bread-crumbs')
 
     @given(content_text())
     def test_content_raw_text(self, content):
 
         url = reverse('browse-content-raw',
                       url_args={'query_string': content['sha1']})
 
         resp = self.client.get(url)
 
         content_data = self.content_get(content['sha1'])['data']
 
         self.assertEqual(resp.status_code, 200)
         self.assertEqual(resp['Content-Type'], 'text/plain')
         self.assertEqual(resp['Content-disposition'],
                          'filename=%s_%s' % ('sha1', content['sha1']))
         self.assertEqual(resp.content, content_data)
 
         filename = content['path'].split('/')[-1]
 
         url = reverse('browse-content-raw',
                       url_args={'query_string': content['sha1']}, # noqa
                       query_params={'filename': filename})
 
         resp = self.client.get(url)
 
         self.assertEqual(resp.status_code, 200)
         self.assertEqual(resp['Content-Type'], 'text/plain')
         self.assertEqual(resp['Content-disposition'],
                          'filename=%s' % filename)
         self.assertEqual(resp.content, content_data)
 
     @given(content_text_non_utf8())
     def test_content_raw_no_utf8_text(self, content):
 
         url = reverse('browse-content-raw',
                       url_args={'query_string': content['sha1']})
 
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 200)
         _, encoding = get_mimetype_and_encoding_for_content(resp.content)
         self.assertEqual(encoding, content['encoding'])
 
     @given(content_image_type())
     def test_content_raw_bin(self, content):
 
         url = reverse('browse-content-raw',
                       url_args={'query_string': content['sha1']})
 
         resp = self.client.get(url)
 
         filename = content['path'].split('/')[-1]
         content_data = self.content_get(content['sha1'])['data']
 
         self.assertEqual(resp.status_code, 200)
         self.assertEqual(resp['Content-Type'], 'application/octet-stream')
         self.assertEqual(resp['Content-disposition'],
                          'attachment; filename=%s_%s' %
                          ('sha1', content['sha1']))
         self.assertEqual(resp.content, content_data)
 
         url = reverse('browse-content-raw',
                       url_args={'query_string': content['sha1']},
                       query_params={'filename': filename})
 
         resp = self.client.get(url)
 
         self.assertEqual(resp.status_code, 200)
         self.assertEqual(resp['Content-Type'], 'application/octet-stream')
         self.assertEqual(resp['Content-disposition'],
                          'attachment; filename=%s' % filename)
         self.assertEqual(resp.content, content_data)
 
     @given(invalid_sha1(), unknown_content())
     def test_content_request_errors(self, invalid_sha1, unknown_content):
 
         url = reverse('browse-content',
                       url_args={'query_string': invalid_sha1})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 400)
         self.assertTemplateUsed('error.html')
 
         url = reverse('browse-content',
                       url_args={'query_string': unknown_content['sha1']})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
 
     @patch('swh.web.browse.utils.service')
     @given(content())
     def test_content_bytes_missing(self, mock_service, content):
 
         content_data = self.content_get_metadata(content['sha1'])
         content_data['data'] = None
 
         mock_service.lookup_content.return_value = content_data
         mock_service.lookup_content_raw.side_effect = NotFoundExc(
             'Content bytes not available!')
 
         url = reverse('browse-content',
                       url_args={'query_string': content['sha1']})
 
         resp = self.client.get(url)
 
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('browse/content.html')
 
     @patch('swh.web.browse.views.content.request_content')
     def test_content_too_large(self, mock_request_content):
         stub_content_too_large_data = {
             'checksums': {
                 'sha1': '8624bcdae55baeef00cd11d5dfcfa60f68710a02',
                 'sha1_git': '94a9ed024d3859793618152ea559a168bbcbb5e2',
                 'sha256': ('8ceb4b9ee5adedde47b31e975c1d90c73ad27b6b16'
                            '5a1dcd80c7c545eb65b903'),
                 'blake2s256': ('38702b7168c7785bfe748b51b45d9856070ba90'
                                'f9dc6d90f2ea75d4356411ffe')
             },
             'length': 3000000,
             'raw_data': None,
             'mimetype': 'text/plain',
             'encoding': 'us-ascii',
             'language': 'not detected',
             'licenses': 'GPL',
             'error_code': 200,
             'error_message': '',
             'error_description': ''
         }
 
         content_sha1 = stub_content_too_large_data['checksums']['sha1']
 
         mock_request_content.return_value = stub_content_too_large_data
 
         url = reverse('browse-content',
                       url_args={'query_string': content_sha1})
 
         url_raw = reverse('browse-content-raw',
                           url_args={'query_string': content_sha1})
 
         resp = self.client.get(url)
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('browse/content.html')
 
         self.assertContains(resp, 'Content is too large to be displayed')
         self.assertContains(resp, url_raw)
 
     def _process_content_for_display(self, content):
         content_data = self.content_get(content['sha1'])
 
         mime_type, encoding = get_mimetype_and_encoding_for_content(
             content_data['data'])
 
         mime_type, content_data = _reencode_content(mime_type, encoding,
                                                     content_data['data'])
 
         return prepare_content_for_display(content_data, mime_type,
                                            content['path'])
diff --git a/swh/web/tests/browse/views/test_origin.py b/swh/web/tests/browse/views/test_origin.py
index 4425c196..0f327760 100644
--- a/swh/web/tests/browse/views/test_origin.py
+++ b/swh/web/tests/browse/views/test_origin.py
@@ -1,919 +1,870 @@
-# Copyright (C) 2017-2018  The Software Heritage developers
+# Copyright (C) 2017-2019  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
-# flake8: noqa
+import random
 
 from unittest.mock import patch
 
 from django.utils.html import escape
 
+from hypothesis import given
+
+from swh.web.browse.utils import process_snapshot_branches
 from swh.web.common.exc import NotFoundExc
 from swh.web.common.utils import (
     reverse, gen_path_info, format_utc_iso_date,
     parse_timestamp, get_swh_persistent_id
 )
-from swh.web.tests.testcase import WebTestCase
-
-from .data.origin_test_data import (
-    origin_info_test_data,
-    origin_visits_test_data,
-    stub_content_origin_info, stub_content_origin_visit_id,
-    stub_content_origin_visit_unix_ts, stub_content_origin_visit_iso_date,
-    stub_content_origin_branch,
-    stub_content_origin_visits, stub_content_origin_snapshot,
-    stub_origin_info, stub_visit_id,
-    stub_origin_visits, stub_origin_snapshot,
-    stub_origin_root_directory_entries, stub_origin_master_branch,
-    stub_origin_root_directory_sha1, stub_origin_sub_directory_path,
-    stub_origin_sub_directory_entries, stub_visit_unix_ts, stub_visit_iso_date
-)
-
-from .data.content_test_data import (
-    stub_content_root_dir,
-    stub_content_text_data,
-    stub_content_text_path
+from swh.web.tests.data import get_content
+from swh.web.tests.strategies import (
+    origin, origin_with_multiple_visits
 )
+from swh.web.tests.testcase import WebTestCase
 
-stub_origin_info_no_type = dict(stub_origin_info)
-stub_origin_info_no_type['type'] = None
-
-def _to_snapshot_dict(branches=None, releases=None):
-    snp = {'branches': {}}
-    if branches:
-        for b in branches:
-            snp['branches'][b['name']] = {
-                'target': b['revision'],
-                'target_type': 'revision'
-            }
-    if releases:
-        for r in releases:
-            snp['branches'][r['branch_name']] =  {
-                'target': r['id'],
-                'target_type': 'release'
-            }
-    return snp
 
 class SwhBrowseOriginTest(WebTestCase):
 
-    @patch('swh.web.browse.utils.service')
-    @patch('swh.web.browse.utils.get_origin_visit_snapshot')
-    @patch('swh.web.common.origin_visits.get_origin_visits')
-    @patch('swh.web.browse.utils.get_origin_info')
-    @patch('swh.web.browse.views.origin.get_origin_info')
-    @patch('swh.web.browse.views.origin.get_origin_visits')
-    @patch('swh.web.browse.views.origin.service')
-    def test_origin_visits_browse(self, mock_service, mock_get_origin_visits,
-                                  mock_get_origin_info, mock_get_origin_info_utils,
-                                  mock_get_origin_visits_utils,
-                                  mock_get_origin_visit_snapshot,
-                                  mock_utils_service):
-        mock_service.lookup_origin.return_value = origin_info_test_data
-        mock_get_origin_info.return_value = origin_info_test_data
-        mock_get_origin_info_utils.return_value = origin_info_test_data
-        mock_get_origin_visits.return_value = origin_visits_test_data
-        mock_get_origin_visits_utils.return_value = origin_visits_test_data
-        mock_get_origin_visit_snapshot.return_value = stub_content_origin_snapshot
-        mock_utils_service.lookup_snapshot_size.return_value = {
-            'revision': len(stub_content_origin_snapshot[0]),
-            'release': len(stub_content_origin_snapshot[1])
-        }
+    @given(origin_with_multiple_visits())
+    def test_origin_visits_browse(self, origin):
 
         url = reverse('browse-origin-visits',
-                      url_args={'origin_type': origin_info_test_data['type'],
-                              'origin_url': origin_info_test_data['url']})
+                      url_args={'origin_type': origin['type'],
+                                'origin_url': origin['url']})
         resp = self.client.get(url)
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('origin-visits.html')
 
         url = reverse('browse-origin-visits',
-                      url_args={'origin_url': origin_info_test_data['url']})
+                      url_args={'origin_url': origin['url']})
         resp = self.client.get(url)
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('origin-visits.html')
 
+        visits = self.origin_visit_get(origin['id'])
+
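+        # Each visit should expose a link for browsing the origin root
+        # directory as of that visit's date.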
+        for v in visits:
+            vdate = format_utc_iso_date(v['date'], '%Y-%m-%dT%H:%M:%SZ')
+            browse_dir_url = reverse('browse-origin-directory',
+                                     url_args={'origin_url': origin['url'],
+                                               'timestamp': vdate})
+            self.assertContains(resp, browse_dir_url)
+
     def origin_content_view_helper(self, origin_info, origin_visits,
                                    origin_branches, origin_releases,
-                                   origin_branch,
-                                   root_dir_sha1, content_sha1, content_sha1_git,
-                                   content_path, content_data,
-                                   content_language,
+                                   root_dir_sha1, content,
                                    visit_id=None, timestamp=None):
 
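+        # content['path'] is prefixed with the root directory id; keep only
+        # the part below that root.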
+        content_path = '/'.join(content['path'].split('/')[1:])
+
         url_args = {'origin_type': origin_info['type'],
                     'origin_url': origin_info['url'],
                     'path': content_path}
 
         if not visit_id:
             visit_id = origin_visits[-1]['visit']
 
         query_params = {}
 
         if timestamp:
             url_args['timestamp'] = timestamp
 
         if visit_id:
             query_params['visit_id'] = visit_id
 
         url = reverse('browse-origin-content',
                       url_args=url_args,
                       query_params=query_params)
 
         resp = self.client.get(url)
+
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('content.html')
 
-        self.assertContains(resp, '<code class="%s">' % content_language)
-        self.assertContains(resp, escape(content_data))
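+        # The content should be rendered with its highlightjs language class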
+        self.assertContains(resp, '<code class="%s">' %
+                                  content['hljs_language'])
+        self.assertContains(resp, escape(content['data']))
 
         split_path = content_path.split('/')
 
         filename = split_path[-1]
         path = content_path.replace(filename, '')[:-1]
 
         path_info = gen_path_info(path)
 
         del url_args['path']
 
         if timestamp:
             url_args['timestamp'] = \
                 format_utc_iso_date(parse_timestamp(timestamp).isoformat(),
                                     '%Y-%m-%dT%H:%M:%S')
 
         root_dir_url = reverse('browse-origin-directory',
                                url_args=url_args,
                                query_params=query_params)
 
         self.assertContains(resp, '<li class="swh-path">',
                             count=len(path_info)+1)
 
-
         self.assertContains(resp, '<a href="%s">%s</a>' %
                             (root_dir_url, root_dir_sha1[:7]))
 
         for p in path_info:
             url_args['path'] = p['path']
             dir_url = reverse('browse-origin-directory',
                               url_args=url_args,
                               query_params=query_params)
             self.assertContains(resp, '<a href="%s">%s</a>' %
                                 (dir_url, p['name']))
 
         self.assertContains(resp, '<li>%s</li>' % filename)
 
-        query_string = 'sha1_git:' + content_sha1
+        query_string = 'sha1_git:' + content['sha1_git']
 
         url_raw = reverse('browse-content-raw',
                           url_args={'query_string': query_string},
                           query_params={'filename': filename})
         self.assertContains(resp, url_raw)
 
-        del url_args['path']
+        if 'path' in url_args:
+            del url_args['path']
 
-        origin_branches_url = \
-                reverse('browse-origin-branches',
-                        url_args=url_args,
-                        query_params=query_params)
+        origin_branches_url = reverse('browse-origin-branches',
+                                      url_args=url_args,
+                                      query_params=query_params)
 
         self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
-            (origin_branches_url, len(origin_branches)))
+                                  (origin_branches_url, len(origin_branches)))
 
-        origin_releases_url = \
-                reverse('browse-origin-releases',
-                        url_args=url_args,
-                        query_params=query_params)
+        origin_releases_url = reverse('browse-origin-releases',
+                                      url_args=url_args,
+                                      query_params=query_params)
 
         self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
-            (origin_releases_url, len(origin_releases)))
+                                  (origin_releases_url, len(origin_releases)))
 
         self.assertContains(resp, '<li class="swh-branch">',
                             count=len(origin_branches))
 
         url_args['path'] = content_path
 
         for branch in origin_branches:
             query_params['branch'] = branch['name']
-            root_dir_branch_url = \
-                reverse('browse-origin-content',
-                        url_args=url_args,
-                        query_params=query_params)
+            root_dir_branch_url = reverse('browse-origin-content',
+                                          url_args=url_args,
+                                          query_params=query_params)
 
         self.assertContains(resp, '<a href="%s">' % root_dir_branch_url)
 
         self.assertContains(resp, '<li class="swh-release">',
                             count=len(origin_releases))
 
         query_params['branch'] = None
         for release in origin_releases:
             query_params['release'] = release['name']
-            root_dir_release_url = \
-                reverse('browse-origin-content',
-                        url_args=url_args,
-                        query_params=query_params)
+            root_dir_release_url = reverse('browse-origin-content',
+                                           url_args=url_args,
+                                           query_params=query_params)
 
             self.assertContains(resp, '<a href="%s">' % root_dir_release_url)
 
         del url_args['origin_type']
 
         url = reverse('browse-origin-content',
                       url_args=url_args,
                       query_params=query_params)
 
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('content.html')
 
-        swh_cnt_id = get_swh_persistent_id('content', content_sha1_git)
+        swh_cnt_id = get_swh_persistent_id('content', content['sha1_git'])
         swh_cnt_id_url = reverse('browse-swh-id',
                                  url_args={'swh_id': swh_cnt_id})
         self.assertContains(resp, swh_cnt_id)
         self.assertContains(resp, swh_cnt_id_url)
 
         self.assertContains(resp, 'swh-take-new-snapshot')
 
-    @patch('swh.web.common.origin_visits.get_origin_visits')
-    @patch('swh.web.browse.utils.get_origin_visit_snapshot')
-    @patch('swh.web.browse.views.utils.snapshot_context.service')
-    @patch('swh.web.browse.utils.service')
-    @patch('swh.web.browse.views.utils.snapshot_context.request_content')
-    def test_origin_content_view(self, mock_request_content, mock_utils_service,
-                                 mock_service, mock_get_origin_visit_snapshot,
-                                 mock_get_origin_visits):
-
-        stub_content_text_sha1 = stub_content_text_data['checksums']['sha1']
-        stub_content_text_sha1_git = stub_content_text_data['checksums']['sha1_git']
-        mock_get_origin_visits.return_value = stub_content_origin_visits
-        mock_get_origin_visit_snapshot.return_value = stub_content_origin_snapshot
-        mock_service.lookup_directory_with_path.return_value = \
-            {'target': stub_content_text_sha1}
-        mock_request_content.return_value = stub_content_text_data
-        mock_utils_service.lookup_origin.return_value = stub_content_origin_info
-        mock_utils_service.lookup_snapshot_size.return_value = {
-            'revision': len(stub_content_origin_snapshot[0]),
-            'release': len(stub_content_origin_snapshot[1])
-        }
+    @given(origin_with_multiple_visits())
+    def test_origin_content_view(self, origin):
+
+        origin_visits = self.origin_visit_get(origin['id'])
+
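+        # Build the helper inputs from a real archived visit: resolve the
+        # visit snapshot, take the HEAD revision's root directory and pick
+        # one of its regular files at random.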
+        def _get_test_data(visit_idx):
+            snapshot = self.snapshot_get(origin_visits[visit_idx]['snapshot'])
+            head_rev_id = snapshot['branches']['HEAD']['target']
+            head_rev = self.revision_get(head_rev_id)
+            dir_content = self.directory_ls(head_rev['directory'])
+            dir_files = [e for e in dir_content if e['type'] == 'file']
+            dir_file = random.choice(dir_files)
+            branches, releases = process_snapshot_branches(snapshot)
+            return {
+                'branches': branches,
+                'releases': releases,
+                'root_dir_sha1': head_rev['directory'],
+                'content': get_content(dir_file['checksums']['sha1']),
+                'visit': origin_visits[visit_idx]
+            }
 
-        self.origin_content_view_helper(stub_content_origin_info,
-                                        stub_content_origin_visits,
-                                        stub_content_origin_snapshot[0],
-                                        stub_content_origin_snapshot[1],
-                                        stub_content_origin_branch,
-                                        stub_content_root_dir,
-                                        stub_content_text_sha1,
-                                        stub_content_text_sha1_git,
-                                        stub_content_text_path,
-                                        stub_content_text_data['raw_data'],
-                                        'cpp')
-
-        self.origin_content_view_helper(stub_content_origin_info,
-                                        stub_content_origin_visits,
-                                        stub_content_origin_snapshot[0],
-                                        stub_content_origin_snapshot[1],
-                                        stub_content_origin_branch,
-                                        stub_content_root_dir,
-                                        stub_content_text_sha1,
-                                        stub_content_text_sha1_git,
-                                        stub_content_text_path,
-                                        stub_content_text_data['raw_data'],
-                                        'cpp',
-                                        visit_id=stub_content_origin_visit_id)
-
-        self.origin_content_view_helper(stub_content_origin_info,
-                                        stub_content_origin_visits,
-                                        stub_content_origin_snapshot[0],
-                                        stub_content_origin_snapshot[1],
-                                        stub_content_origin_branch,
-                                        stub_content_root_dir,
-                                        stub_content_text_sha1,
-                                        stub_content_text_sha1_git,
-                                        stub_content_text_path,
-                                        stub_content_text_data['raw_data'],
-                                        'cpp',
-                                        timestamp=stub_content_origin_visit_unix_ts)
-
-        self.origin_content_view_helper(stub_content_origin_info,
-                                        stub_content_origin_visits,
-                                        stub_content_origin_snapshot[0],
-                                        stub_content_origin_snapshot[1],
-                                        stub_content_origin_branch,
-                                        stub_content_root_dir,
-                                        stub_content_text_sha1,
-                                        stub_content_text_sha1_git,
-                                        stub_content_text_path,
-                                        stub_content_text_data['raw_data'],
-                                        'cpp',
-                                        timestamp=stub_content_origin_visit_iso_date)
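+        # Check the content view for the latest visit, first with the default
+        # parameters, then with the visit date passed as timestamp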
+        test_data = _get_test_data(-1)
+
+        self.origin_content_view_helper(origin,
+                                        origin_visits,
+                                        test_data['branches'],
+                                        test_data['releases'],
+                                        test_data['root_dir_sha1'],
+                                        test_data['content'])
+
+        self.origin_content_view_helper(origin,
+                                        origin_visits,
+                                        test_data['branches'],
+                                        test_data['releases'],
+                                        test_data['root_dir_sha1'],
+                                        test_data['content'],
+                                        timestamp=test_data['visit']['date'])
+
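+        # The visit can also be targeted through its Unix timestamp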
+        visit_unix_ts = parse_timestamp(test_data['visit']['date']).timestamp()
+        visit_unix_ts = int(visit_unix_ts)
+
+        self.origin_content_view_helper(origin,
+                                        origin_visits,
+                                        test_data['branches'],
+                                        test_data['releases'],
+                                        test_data['root_dir_sha1'],
+                                        test_data['content'],
+                                        timestamp=visit_unix_ts)
+
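+        # Check the content view for the first visit, targeted by its visit id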
+        test_data = _get_test_data(0)
+
+        self.origin_content_view_helper(origin,
+                                        origin_visits,
+                                        test_data['branches'],
+                                        test_data['releases'],
+                                        test_data['root_dir_sha1'],
+                                        test_data['content'],
+                                        visit_id=test_data['visit']['visit'])
 
     def origin_directory_view_helper(self, origin_info, origin_visits,
-                                     origin_branches, origin_releases, origin_branch,
+                                     origin_branches, origin_releases,
                                      root_directory_sha1, directory_entries,
                                      visit_id=None, timestamp=None, path=None):
 
         dirs = [e for e in directory_entries
                 if e['type'] in ('dir', 'rev')]
         files = [e for e in directory_entries
                  if e['type'] == 'file']
 
         if not visit_id:
             visit_id = origin_visits[-1]['visit']
 
         url_args = {'origin_url': origin_info['url']}
 
         query_params = {}
 
         if timestamp:
             url_args['timestamp'] = timestamp
         else:
             query_params['visit_id'] = visit_id
 
         if path:
             url_args['path'] = path
 
         url = reverse('browse-origin-directory',
                       url_args=url_args,
                       query_params=query_params)
 
         resp = self.client.get(url)
 
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('directory.html')
 
-        self.assertEqual(resp.status_code, 200)
-        self.assertTemplateUsed('directory.html')
-
-
         self.assertContains(resp, '<td class="swh-directory">',
                             count=len(dirs))
         self.assertContains(resp, '<td class="swh-content">',
                             count=len(files))
 
         if timestamp:
             url_args['timestamp'] = \
                 format_utc_iso_date(parse_timestamp(timestamp).isoformat(),
                                     '%Y-%m-%dT%H:%M:%S')
 
         for d in dirs:
             if d['type'] == 'rev':
                 dir_url = reverse('browse-revision',
                                   url_args={'sha1_git': d['target']})
             else:
                 dir_path = d['name']
                 if path:
                     dir_path = "%s/%s" % (path, d['name'])
                 dir_url_args = dict(url_args)
                 dir_url_args['path'] = dir_path
                 dir_url = reverse('browse-origin-directory',
-                                url_args=dir_url_args,
-                                query_params=query_params)
+                                  url_args=dir_url_args,
+                                  query_params=query_params)
             self.assertContains(resp, dir_url)
 
         for f in files:
             file_path = f['name']
             if path:
                 file_path = "%s/%s" % (path, f['name'])
             file_url_args = dict(url_args)
             file_url_args['path'] = file_path
             file_url = reverse('browse-origin-content',
                                url_args=file_url_args,
                                query_params=query_params)
             self.assertContains(resp, file_url)
 
         if 'path' in url_args:
             del url_args['path']
 
         root_dir_branch_url = \
             reverse('browse-origin-directory',
                     url_args=url_args,
                     query_params=query_params)
 
         nb_bc_paths = 1
         if path:
             nb_bc_paths = len(path.split('/')) + 1
 
         self.assertContains(resp, '<li class="swh-path">', count=nb_bc_paths)
         self.assertContains(resp, '<a href="%s">%s</a>' %
                                   (root_dir_branch_url,
                                    root_directory_sha1[:7]))
 
-        origin_branches_url = \
-                reverse('browse-origin-branches',
-                        url_args=url_args,
-                        query_params=query_params)
+        origin_branches_url = reverse('browse-origin-branches',
+                                      url_args=url_args,
+                                      query_params=query_params)
 
         self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
-            (origin_branches_url, len(origin_branches)))
+                                  (origin_branches_url, len(origin_branches)))
 
-        origin_releases_url = \
-                reverse('browse-origin-releases',
-                        url_args=url_args,
-                        query_params=query_params)
+        origin_releases_url = reverse('browse-origin-releases',
+                                      url_args=url_args,
+                                      query_params=query_params)
 
-        self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
-            (origin_releases_url, len(origin_releases)))
+        nb_releases = len(origin_releases)
+        if nb_releases > 0:
+            self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
+                                      (origin_releases_url, nb_releases))
 
         if path:
             url_args['path'] = path
 
         self.assertContains(resp, '<li class="swh-branch">',
                             count=len(origin_branches))
 
         for branch in origin_branches:
             query_params['branch'] = branch['name']
             root_dir_branch_url = \
                 reverse('browse-origin-directory',
                         url_args=url_args,
                         query_params=query_params)
 
             self.assertContains(resp, '<a href="%s">' % root_dir_branch_url)
 
         self.assertContains(resp, '<li class="swh-release">',
                             count=len(origin_releases))
 
         query_params['branch'] = None
         for release in origin_releases:
             query_params['release'] = release['name']
             root_dir_release_url = \
                 reverse('browse-origin-directory',
                         url_args=url_args,
                         query_params=query_params)
 
             self.assertContains(resp, '<a href="%s">' % root_dir_release_url)
 
         self.assertContains(resp, 'vault-cook-directory')
         self.assertContains(resp, 'vault-cook-revision')
 
         swh_dir_id = get_swh_persistent_id('directory', directory_entries[0]['dir_id']) # noqa
         swh_dir_id_url = reverse('browse-swh-id',
                                  url_args={'swh_id': swh_dir_id})
         self.assertContains(resp, swh_dir_id)
         self.assertContains(resp, swh_dir_id_url)
 
         self.assertContains(resp, 'swh-take-new-snapshot')
 
+    @given(origin())
+    def test_origin_root_directory_view(self, origin):
+
+        origin_visits = self.origin_visit_get(origin['id'])
+
+        visit = origin_visits[-1]
+        snapshot = self.snapshot_get(visit['snapshot'])
+        head_rev_id = snapshot['branches']['HEAD']['target']
+        head_rev = self.revision_get(head_rev_id)
+        root_dir_sha1 = head_rev['directory']
+        dir_content = self.directory_ls(root_dir_sha1)
+        branches, releases = process_snapshot_branches(snapshot)
+        visit_unix_ts = parse_timestamp(visit['date']).timestamp()
+        visit_unix_ts = int(visit_unix_ts)
+
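+        # Check the root directory view for the latest visit, then target that
+        # visit explicitly by id, Unix timestamp and ISO date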
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          dir_content)
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          dir_content,
+                                          visit_id=visit['visit'])
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          dir_content,
+                                          timestamp=visit_unix_ts)
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          dir_content,
+                                          timestamp=visit['date'])
+
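+        # Repeat the checks with an origin info lacking the 'type' key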
+        origin = dict(origin)
+        del origin['type']
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          dir_content)
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          dir_content,
+                                          visit_id=visit['visit'])
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          dir_content,
+                                          timestamp=visit_unix_ts)
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          dir_content,
+                                          timestamp=visit['date'])
+
+    @given(origin())
+    def test_origin_sub_directory_view(self, origin):
+
+        origin_visits = self.origin_visit_get(origin['id'])
+
+        visit = origin_visits[-1]
+        snapshot = self.snapshot_get(visit['snapshot'])
+        head_rev_id = snapshot['branches']['HEAD']['target']
+        head_rev = self.revision_get(head_rev_id)
+        root_dir_sha1 = head_rev['directory']
+        subdirs = [e for e in self.directory_ls(root_dir_sha1)
+                   if e['type'] == 'dir']
+        branches, releases = process_snapshot_branches(snapshot)
+        visit_unix_ts = parse_timestamp(visit['date']).timestamp()
+        visit_unix_ts = int(visit_unix_ts)
+
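+        # Nothing to check if the root directory contains no sub-directory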
+        if len(subdirs) == 0:
+            return
+
+        subdir = random.choice(subdirs)
+        subdir_content = self.directory_ls(subdir['target'])
+        subdir_path = subdir['name']
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          subdir_content,
+                                          path=subdir_path)
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          subdir_content,
+                                          path=subdir_path,
+                                          visit_id=visit['visit'])
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          subdir_content,
+                                          path=subdir_path,
+                                          timestamp=visit_unix_ts)
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          subdir_content,
+                                          path=subdir_path,
+                                          timestamp=visit['date'])
+
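+        # Same checks with an origin info stripped of its 'type' key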
+        origin = dict(origin)
+        del origin['type']
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          subdir_content,
+                                          path=subdir_path)
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          subdir_content,
+                                          path=subdir_path,
+                                          visit_id=visit['visit'])
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          subdir_content,
+                                          path=subdir_path,
+                                          timestamp=visit_unix_ts)
+
+        self.origin_directory_view_helper(origin, origin_visits,
+                                          branches,
+                                          releases,
+                                          root_dir_sha1,
+                                          subdir_content,
+                                          path=subdir_path,
+                                          timestamp=visit['date'])
 
-    @patch('swh.web.common.origin_visits.get_origin_visits')
-    @patch('swh.web.browse.utils.get_origin_visit_snapshot')
-    @patch('swh.web.browse.utils.service')
-    @patch('swh.web.browse.views.origin.service')
-    def test_origin_root_directory_view(self, mock_origin_service,
-                                        mock_utils_service,
-                                        mock_get_origin_visit_snapshot,
-                                        mock_get_origin_visits):
-
-        mock_get_origin_visits.return_value = stub_origin_visits
-        mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
-        mock_utils_service.lookup_directory.return_value = \
-            stub_origin_root_directory_entries
-        mock_utils_service.lookup_origin.return_value = stub_origin_info
-        mock_utils_service.lookup_snapshot_size.return_value = {
-            'revision': len(stub_origin_snapshot[0]),
-            'release': len(stub_origin_snapshot[1])
-        }
+    def origin_branches_helper(self, origin_info, origin_snapshot):
+        url_args = {'origin_type': origin_info['type'],
+                    'origin_url': origin_info['url']}
 
-        self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_root_directory_entries)
-
-        self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_root_directory_entries,
-                                          visit_id=stub_visit_id)
-
-        self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_root_directory_entries,
-                                          timestamp=stub_visit_unix_ts)
-
-        self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_root_directory_entries,
-                                          timestamp=stub_visit_iso_date)
-
-        self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_root_directory_entries)
-
-        self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_root_directory_entries,
-                                          visit_id=stub_visit_id)
-
-        self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_root_directory_entries,
-                                          timestamp=stub_visit_unix_ts)
-
-        self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_root_directory_entries,
-                                          timestamp=stub_visit_iso_date)
+        url = reverse('browse-origin-branches',
+                      url_args=url_args)
 
-    @patch('swh.web.common.origin_visits.get_origin_visits')
-    @patch('swh.web.browse.utils.get_origin_visit_snapshot')
-    @patch('swh.web.browse.utils.service')
-    @patch('swh.web.browse.views.utils.snapshot_context.service')
-    def test_origin_sub_directory_view(self, mock_origin_service,
-                                       mock_utils_service,
-                                       mock_get_origin_visit_snapshot,
-                                       mock_get_origin_visits):
-
-        mock_get_origin_visits.return_value = stub_origin_visits
-        mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
-        mock_utils_service.lookup_directory.return_value = \
-            stub_origin_sub_directory_entries
-        mock_origin_service.lookup_directory_with_path.return_value = \
-            {'target': stub_origin_sub_directory_entries[0]['dir_id'],
-             'type' : 'dir'}
-        mock_utils_service.lookup_origin.return_value = stub_origin_info
-        mock_utils_service.lookup_snapshot_size.return_value = {
-            'revision': len(stub_origin_snapshot[0]),
-            'release': len(stub_origin_snapshot[1])
-        }
+        resp = self.client.get(url)
+
+        self.assertEqual(resp.status_code, 200)
+        self.assertTemplateUsed('branches.html')
+
+        origin_branches = origin_snapshot[0]
+        origin_releases = origin_snapshot[1]
+
+        origin_branches_url = reverse('browse-origin-branches',
+                                      url_args=url_args)
 
-        self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_sub_directory_entries,
-                                          path=stub_origin_sub_directory_path)
-
-        self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_sub_directory_entries,
-                                          visit_id=stub_visit_id,
-                                          path=stub_origin_sub_directory_path)
-
-        self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_sub_directory_entries,
-                                          timestamp=stub_visit_unix_ts,
-                                          path=stub_origin_sub_directory_path)
-
-        self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_sub_directory_entries,
-                                          timestamp=stub_visit_iso_date,
-                                          path=stub_origin_sub_directory_path)
-
-        self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_sub_directory_entries,
-                                          path=stub_origin_sub_directory_path)
-
-        self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_sub_directory_entries,
-                                          visit_id=stub_visit_id,
-                                          path=stub_origin_sub_directory_path)
-
-        self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_sub_directory_entries,
-                                          timestamp=stub_visit_unix_ts,
-                                          path=stub_origin_sub_directory_path)
-
-        self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
-                                          stub_origin_snapshot[0],
-                                          stub_origin_snapshot[1],
-                                          stub_origin_master_branch,
-                                          stub_origin_root_directory_sha1,
-                                          stub_origin_sub_directory_entries,
-                                          timestamp=stub_visit_iso_date,
-                                          path=stub_origin_sub_directory_path)
+        self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
+                                  (origin_branches_url, len(origin_branches)))
+
+        origin_releases_url = reverse('browse-origin-releases',
+                                      url_args=url_args)
+
+        nb_releases = len(origin_releases)
+        if nb_releases > 0:
+            self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
+                                      (origin_releases_url, nb_releases))
+
+        self.assertContains(resp, '<tr class="swh-branch-entry',
+                            count=len(origin_branches))
+
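+        # Each branch entry must link to the browse view of the branch root
+        # directory and to the browse view of its target revision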
+        for branch in origin_branches:
+            browse_branch_url = reverse(
+                'browse-origin-directory',
+                url_args={'origin_type': origin_info['type'],
+                          'origin_url': origin_info['url']},
+                query_params={'branch': branch['name']})
+            self.assertContains(resp, '<a href="%s">' %
+                                      escape(browse_branch_url))
+
+            browse_revision_url = reverse(
+                'browse-revision',
+                url_args={'sha1_git': branch['revision']},
+                query_params={'origin_type': origin_info['type'],
+                              'origin': origin_info['url']})
+            self.assertContains(resp, '<a href="%s">' %
+                                      escape(browse_revision_url))
+
+    @given(origin())
+    def test_origin_branches(self, origin):
+
+        origin_visits = self.origin_visit_get(origin['id'])
+
+        visit = origin_visits[-1]
+        snapshot = self.snapshot_get(visit['snapshot'])
+        snapshot_content = process_snapshot_branches(snapshot)
+
+        self.origin_branches_helper(origin, snapshot_content)
+
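+        # Browsing the branches without an origin type must also work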
+        origin = dict(origin)
+        origin['type'] = None
+
+        self.origin_branches_helper(origin, snapshot_content)
+
+    def origin_releases_helper(self, origin_info, origin_snapshot):
+        url_args = {'origin_type': origin_info['type'],
+                    'origin_url': origin_info['url']}
+
+        url = reverse('browse-origin-releases',
+                      url_args=url_args)
+
+        resp = self.client.get(url)
+        self.assertEqual(resp.status_code, 200)
+        self.assertTemplateUsed('releases.html')
+
+        origin_branches = origin_snapshot[0]
+        origin_releases = origin_snapshot[1]
+
+        origin_branches_url = reverse('browse-origin-branches',
+                                      url_args=url_args)
+
+        self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
+                                  (origin_branches_url, len(origin_branches)))
+
+        origin_releases_url = reverse('browse-origin-releases',
+                                      url_args=url_args)
+
+        nb_releases = len(origin_releases)
+        if nb_releases > 0:
+            self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
+                                      (origin_releases_url, nb_releases))
+
+        self.assertContains(resp, '<tr class="swh-release-entry',
+                            count=nb_releases)
+
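+        # Each release entry must link to the browse views of the release and
+        # of its target revision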
+        for release in origin_releases:
+            browse_release_url = reverse(
+                'browse-release',
+                url_args={'sha1_git': release['id']},
+                query_params={'origin': origin_info['url']})
+            browse_revision_url = reverse(
+                'browse-revision',
+                url_args={'sha1_git': release['target']},
+                query_params={'origin': origin_info['url']})
+
+            self.assertContains(resp, '<a href="%s">' %
+                                      escape(browse_release_url))
+            self.assertContains(resp, '<a href="%s">' %
+                                      escape(browse_revision_url))
+
+    @given(origin())
+    def test_origin_releases(self, origin):
+
+        origin_visits = self.origin_visit_get(origin['id'])
+
+        visit = origin_visits[-1]
+        snapshot = self.snapshot_get(visit['snapshot'])
+        snapshot_content = process_snapshot_branches(snapshot)
+
+        self.origin_releases_helper(origin, snapshot_content)
+
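+        # Browsing the releases without an origin type must also work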
+        origin = dict(origin)
+        origin['type'] = None
+
+        self.origin_releases_helper(origin, snapshot_content)
 
     @patch('swh.web.browse.views.utils.snapshot_context.request_content')
     @patch('swh.web.common.origin_visits.get_origin_visits')
     @patch('swh.web.browse.utils.get_origin_visit_snapshot')
     @patch('swh.web.browse.utils.service')
     @patch('swh.web.browse.views.origin.service')
     @patch('swh.web.browse.views.utils.snapshot_context.service')
     @patch('swh.web.browse.views.origin.get_origin_info')
     def test_origin_request_errors(self, mock_get_origin_info,
                                    mock_snapshot_service,
                                    mock_origin_service,
                                    mock_utils_service,
                                    mock_get_origin_visit_snapshot,
                                    mock_get_origin_visits,
                                    mock_request_content):
 
         mock_get_origin_info.side_effect = \
             NotFoundExc('origin not found')
         url = reverse('browse-origin-visits',
                       url_args={'origin_type': 'foo',
                                 'origin_url': 'bar'})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertContains(resp, 'origin not found', status_code=404)
 
         mock_utils_service.lookup_origin.side_effect = None
-        mock_utils_service.lookup_origin.return_value = origin_info_test_data
+        mock_utils_service.lookup_origin.return_value = {'type': 'foo',
+                                                         'url': 'bar',
+                                                         'id': 457}
         mock_get_origin_visits.return_value = []
         url = reverse('browse-origin-directory',
                       url_args={'origin_type': 'foo',
                                 'origin_url': 'bar'})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertContains(resp, "No visit", status_code=404)
 
-        mock_get_origin_visits.return_value = stub_origin_visits
+        mock_get_origin_visits.return_value = [{'visit': 1}]
         mock_get_origin_visit_snapshot.side_effect = \
             NotFoundExc('visit not found')
         url = reverse('browse-origin-directory',
                       url_args={'origin_type': 'foo',
                                 'origin_url': 'bar'},
-                      query_params={'visit_id': len(stub_origin_visits)+1})
+                      query_params={'visit_id': 2})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertRegex(resp.content.decode('utf-8'), 'Visit.*not found')
 
-        mock_get_origin_visits.return_value = stub_origin_visits
+        mock_get_origin_visits.return_value = [{
+            'date': '2015-09-26T09:30:52.373449+00:00',
+            'metadata': {},
+            'origin': 457,
+            'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
+            'status': 'full',
+            'visit': 1
+        }]
         mock_get_origin_visit_snapshot.side_effect = None
-        mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
+        mock_get_origin_visit_snapshot.return_value = (
+            [{'directory': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
+              'name': 'HEAD',
+              'revision': '7bc08e1aa0b08cb23e18715a32aa38517ad34672',
+              'date': '04 May 2017, 13:27 UTC',
+              'message': ''}],
+            []
+        )
         mock_utils_service.lookup_snapshot_size.return_value = {
-            'revision': len(stub_origin_snapshot[0]),
-            'release': len(stub_origin_snapshot[1])
+            'revision': 1,
+            'release': 0
         }
         mock_utils_service.lookup_directory.side_effect = \
             NotFoundExc('Directory not found')
         url = reverse('browse-origin-directory',
                       url_args={'origin_type': 'foo',
                                 'origin_url': 'bar'})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertContains(resp, 'Directory not found', status_code=404)
 
-        with patch('swh.web.browse.views.utils.snapshot_context.get_snapshot_context') \
-                as mock_get_snapshot_context:
+        with patch('swh.web.browse.views.utils.snapshot_context.'
+                   'get_snapshot_context') as mock_get_snapshot_context:
             mock_get_snapshot_context.side_effect = \
                 NotFoundExc('Snapshot not found')
             url = reverse('browse-origin-directory',
                           url_args={'origin_type': 'foo',
                                     'origin_url': 'bar'})
             resp = self.client.get(url)
             self.assertEqual(resp.status_code, 404)
             self.assertTemplateUsed('error.html')
             self.assertContains(resp, 'Snapshot not found', status_code=404)
 
         mock_origin_service.lookup_origin.side_effect = None
-        mock_origin_service.lookup_origin.return_value = origin_info_test_data
+        mock_origin_service.lookup_origin.return_value = {'type': 'foo',
+                                                          'url': 'bar',
+                                                          'id': 457}
         mock_get_origin_visits.return_value = []
         url = reverse('browse-origin-content',
                       url_args={'origin_type': 'foo',
                                 'origin_url': 'bar',
                                 'path': 'foo'})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertContains(resp, "No visit", status_code=404)
 
-        mock_get_origin_visits.return_value = stub_origin_visits
+        mock_get_origin_visits.return_value = [{'visit': 1}]
         mock_get_origin_visit_snapshot.side_effect = \
             NotFoundExc('visit not found')
         url = reverse('browse-origin-content',
                       url_args={'origin_type': 'foo',
                                 'origin_url': 'bar',
                                 'path': 'foo'},
-                      query_params={'visit_id': len(stub_origin_visits)+1})
+                      query_params={'visit_id': 2})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertRegex(resp.content.decode('utf-8'), 'Visit.*not found')
 
-        mock_get_origin_visits.return_value = stub_origin_visits
+        mock_get_origin_visits.return_value = [{
+            'date': '2015-09-26T09:30:52.373449+00:00',
+            'metadata': {},
+            'origin': 457,
+            'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
+            'status': 'full',
+            'visit': 1
+        }]
         mock_get_origin_visit_snapshot.side_effect = None
         mock_get_origin_visit_snapshot.return_value = ([], [])
         url = reverse('browse-origin-content',
                       url_args={'origin_type': 'foo',
-                              'origin_url': 'bar',
-                              'path': 'baz'})
+                                'origin_url': 'bar',
+                                'path': 'baz'})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertRegex(resp.content.decode('utf-8'),
                          'Origin.*has an empty list of branches')
 
-        mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
+        mock_get_origin_visit_snapshot.return_value = (
+            [{'directory': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
+              'name': 'HEAD',
+              'revision': '7bc08e1aa0b08cb23e18715a32aa38517ad34672',
+              'date': '04 May 2017, 13:27 UTC',
+              'message': ''}],
+            []
+        )
         mock_snapshot_service.lookup_directory_with_path.return_value = \
-            {'target': stub_content_text_data['checksums']['sha1']}
+            {'target': '5ecd9f37b7a2d2e9980d201acd6286116f2ba1f1'}
         mock_request_content.side_effect = \
             NotFoundExc('Content not found')
         url = reverse('browse-origin-content',
                       url_args={'origin_type': 'foo',
-                              'origin_url': 'bar',
-                              'path': 'baz'})
+                                'origin_url': 'bar',
+                                'path': 'baz'})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 404)
         self.assertTemplateUsed('error.html')
         self.assertContains(resp, 'Content not found', status_code=404)
 
-
     @patch('swh.web.common.origin_visits.get_origin_visits')
     @patch('swh.web.browse.utils.get_origin_visit_snapshot')
     @patch('swh.web.browse.utils.service')
     def test_origin_empty_snapshot(self, mock_utils_service,
                                    mock_get_origin_visit_snapshot,
                                    mock_get_origin_visits):
 
-        mock_get_origin_visits.return_value = stub_origin_visits
+        mock_get_origin_visits.return_value = [{
+            'date': '2015-09-26T09:30:52.373449+00:00',
+            'metadata': {},
+            'origin': 457,
+            'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
+            'status': 'full',
+            'visit': 1
+        }]
         mock_get_origin_visit_snapshot.return_value = ([], [])
         mock_utils_service.lookup_snapshot_size.return_value = {
             'revision': 0,
             'release': 0
         }
         url = reverse('browse-origin-directory',
                       url_args={'origin_type': 'foo',
-                              'origin_url': 'bar'})
+                                'origin_url': 'bar'})
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 200)
         self.assertTemplateUsed('content.html')
         self.assertRegex(resp.content.decode('utf-8'), 'snapshot.*is empty')
-
-    def origin_branches_helper(self, origin_info, origin_snapshot):
-        url_args = {'origin_type': origin_info['type'],
-                    'origin_url': origin_info['url']}
-
-        url = reverse('browse-origin-branches',
-                      url_args=url_args)
-
-        resp = self.client.get(url)
-
-        self.assertEqual(resp.status_code, 200)
-        self.assertTemplateUsed('branches.html')
-
-        origin_branches = origin_snapshot[0]
-        origin_releases = origin_snapshot[1]
-
-        origin_branches_url = \
-                reverse('browse-origin-branches',
-                        url_args=url_args)
-
-        self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
-            (origin_branches_url, len(origin_branches)))
-
-        origin_releases_url = \
-                reverse('browse-origin-releases',
-                        url_args=url_args)
-
-        self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
-            (origin_releases_url, len(origin_releases)))
-
-        self.assertContains(resp, '<tr class="swh-branch-entry',
-                            count=len(origin_branches))
-
-        for branch in origin_branches:
-            browse_branch_url = reverse('browse-origin-directory',
-                                        url_args={'origin_type': origin_info['type'],
-                                                'origin_url': origin_info['url']},
-                                        query_params={'branch': branch['name']})
-            self.assertContains(resp, '<a href="%s">' % escape(browse_branch_url))
-
-            browse_revision_url = reverse('browse-revision',
-                                          url_args={'sha1_git': branch['revision']},
-                                          query_params={'origin_type': origin_info['type'],
-                                                        'origin': origin_info['url']})
-            self.assertContains(resp, '<a href="%s">' % escape(browse_revision_url))
-
-
-    @patch('swh.web.browse.views.utils.snapshot_context.process_snapshot_branches')
-    @patch('swh.web.browse.views.utils.snapshot_context.service')
-    @patch('swh.web.common.origin_visits.get_origin_visits')
-    @patch('swh.web.browse.utils.get_origin_visit_snapshot')
-    @patch('swh.web.browse.utils.service')
-    @patch('swh.web.browse.views.origin.service')
-    def test_origin_branches(self, mock_origin_service,
-                             mock_utils_service,
-                             mock_get_origin_visit_snapshot,
-                             mock_get_origin_visits,
-                             mock_snp_ctx_service,
-                             mock_snp_ctx_process_branches):
-        mock_get_origin_visits.return_value = stub_origin_visits
-        mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
-        mock_utils_service.lookup_origin.return_value = stub_origin_info
-        mock_utils_service.lookup_snapshot_size.return_value = \
-            {'revision': len(stub_origin_snapshot[0]), 'release': len(stub_origin_snapshot[1])}
-        mock_snp_ctx_service.lookup_snapshot.return_value = \
-            _to_snapshot_dict(branches=stub_origin_snapshot[0])
-        mock_snp_ctx_process_branches.return_value = stub_origin_snapshot
-
-        self.origin_branches_helper(stub_origin_info, stub_origin_snapshot)
-
-        self.origin_branches_helper(stub_origin_info_no_type, stub_origin_snapshot)
-
-
-    def origin_releases_helper(self, origin_info, origin_snapshot):
-        url_args = {'origin_type': origin_info['type'],
-                    'origin_url': origin_info['url']}
-
-        url = reverse('browse-origin-releases',
-                      url_args=url_args)
-
-        resp = self.client.get(url)
-        self.assertEqual(resp.status_code, 200)
-        self.assertTemplateUsed('releases.html')
-
-        origin_branches = origin_snapshot[0]
-        origin_releases = origin_snapshot[1]
-
-        origin_branches_url = \
-                reverse('browse-origin-branches',
-                        url_args=url_args)
-
-        self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
-            (origin_branches_url, len(origin_branches)))
-
-        origin_releases_url = \
-                reverse('browse-origin-releases',
-                        url_args=url_args)
-
-        self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
-            (origin_releases_url, len(origin_releases)))
-
-        self.assertContains(resp, '<tr class="swh-release-entry',
-                            count=len(origin_releases))
-
-        for release in origin_releases:
-            browse_release_url = reverse('browse-release',
-                                         url_args={'sha1_git': release['id']},
-                                         query_params={'origin': origin_info['url']})
-            browse_revision_url = reverse('browse-revision',
-                                          url_args={'sha1_git': release['target']},
-                                          query_params={'origin': origin_info['url']})
-
-            self.assertContains(resp, '<a href="%s">' % escape(browse_release_url))
-            self.assertContains(resp, '<a href="%s">' % escape(browse_revision_url))
-
-
-    @patch('swh.web.browse.views.utils.snapshot_context.process_snapshot_branches')
-    @patch('swh.web.browse.views.utils.snapshot_context.service')
-    @patch('swh.web.common.origin_visits.get_origin_visits')
-    @patch('swh.web.browse.utils.get_origin_visit_snapshot')
-    @patch('swh.web.browse.utils.service')
-    @patch('swh.web.browse.views.origin.service')
-    def test_origin_releases(self, mock_origin_service,
-                             mock_utils_service,
-                             mock_get_origin_visit_snapshot,
-                             mock_get_origin_visits,
-                             mock_snp_ctx_service,
-                             mock_snp_ctx_process_branches):
-        mock_get_origin_visits.return_value = stub_origin_visits
-        mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
-        mock_utils_service.lookup_origin.return_value = stub_origin_info
-        mock_utils_service.lookup_snapshot_size.return_value = \
-            {'revision': len(stub_origin_snapshot[0]), 'release': len(stub_origin_snapshot[1])}
-        mock_snp_ctx_service.lookup_snapshot.return_value = \
-            _to_snapshot_dict(releases=stub_origin_snapshot[1])
-        mock_snp_ctx_process_branches.return_value = stub_origin_snapshot
-
-        self.origin_releases_helper(stub_origin_info, stub_origin_snapshot)
-        self.origin_releases_helper(stub_origin_info_no_type, stub_origin_snapshot)
-
diff --git a/swh/web/tests/data.py b/swh/web/tests/data.py
index 3204aed2..499a78fb 100644
--- a/swh/web/tests/data.py
+++ b/swh/web/tests/data.py
@@ -1,276 +1,284 @@
 # Copyright (C) 2018-2019  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import os
 import time
 
 from swh.indexer.language import LanguageIndexer
 from swh.indexer.fossology_license import FossologyLicenseIndexer
 from swh.indexer.mimetype import MimetypeIndexer
 from swh.indexer.ctags import CtagsIndexer
 from swh.indexer.storage import get_indexer_storage
 from swh.model.hashutil import hash_to_hex, hash_to_bytes, DEFAULT_ALGORITHMS
 from swh.model.identifiers import directory_identifier
 from swh.loader.git.from_disk import GitLoaderFromArchive
 from swh.storage.algos.dir_iterators import dir_iterator
 from swh.web.browse.utils import (
     get_mimetype_and_encoding_for_content, prepare_content_for_display
 )
 
 # Module used to initialize data that will be provided as tests input
 
 # Configuration for git loader
 _TEST_LOADER_CONFIG = {
     'storage': {
         'cls': 'memory',
         'args': {}
     },
     'send_contents': True,
     'send_directories': True,
     'send_revisions': True,
     'send_releases': True,
     'send_snapshot': True,
 
     'content_size_limit': 100 * 1024 * 1024,
     'content_packet_size': 10,
     'content_packet_size_bytes': 100 * 1024 * 1024,
     'directory_packet_size': 10,
     'revision_packet_size': 10,
     'release_packet_size': 10,
 
     'save_data': False,
 }
 
 # Base content indexer configuration
 _TEST_INDEXER_BASE_CONFIG = {
     'storage': {
         'cls': 'memory',
         'args': {},
     },
     'objstorage': {
         'cls': 'memory',
         'args': {},
     },
     'indexer_storage': {
         'cls': 'memory',
         'args': {},
     }
 }
 
 
 # MimetypeIndexer with custom configuration for tests
 class _MimetypeIndexer(MimetypeIndexer):
     def parse_config_file(self, *args, **kwargs):
         return {
             **_TEST_INDEXER_BASE_CONFIG,
             'tools': {
                 'name': 'file',
                 'version': '1:5.30-1+deb9u1',
                 'configuration': {
                     "type": "library",
                     "debian-package": "python3-magic"
                 }
             }
         }
 
 
 # LanguageIndexer with custom configuration for tests
 class _LanguageIndexer(LanguageIndexer):
     def parse_config_file(self, *args, **kwargs):
         return {
             **_TEST_INDEXER_BASE_CONFIG,
             'tools': {
                 'name': 'pygments',
                 'version': '2.0.1+dfsg-1.1+deb8u1',
                 'configuration': {
                     'type': 'library',
                     'debian-package': 'python3-pygments',
                     'max_content_size': 10240,
                 }
             }
         }
 
 
 # FossologyLicenseIndexer with custom configuration for tests
 class _FossologyLicenseIndexer(FossologyLicenseIndexer):
     def parse_config_file(self, *args, **kwargs):
         return {
             **_TEST_INDEXER_BASE_CONFIG,
             'workdir': '/tmp/swh/indexer.fossology.license',
             'tools': {
                 'name': 'nomos',
                 'version': '3.1.0rc2-31-ga2cbb8c',
                 'configuration': {
                     'command_line': 'nomossa <filepath>',
                 },
             }
         }
 
 
 # CtagsIndexer with custom configuration for tests
 class _CtagsIndexer(CtagsIndexer):
     def parse_config_file(self, *args, **kwargs):
         return {
             **_TEST_INDEXER_BASE_CONFIG,
             'workdir': '/tmp/swh/indexer.ctags',
             'languages': {'c': 'c'},
             'tools': {
                 'name': 'universal-ctags',
                 'version': '~git7859817b',
                 'configuration': {
                     'command_line': '''ctags --fields=+lnz --sort=no --links=no ''' # noqa
                                     '''--output-format=json <filepath>'''
                 },
             }
         }
 
 
 # Lightweight git repositories that will be loaded to generate
 # input data for tests
 _TEST_ORIGINS = [
     {
         'id': 1,
         'type': 'git',
         'url': 'https://github.com/wcoder/highlightjs-line-numbers.js',
         'archives': ['highlightjs-line-numbers.js.zip',
                      'highlightjs-line-numbers.js_visit2.zip']
     },
     {
         'id': 2,
         'type': 'git',
         'url': 'https://github.com/memononen/libtess2',
         'archives': ['libtess2.zip']
     },
     {
         'id': 3,
         'type': 'git',
         'url': 'repo_with_submodules',
         'archives': ['repo_with_submodules.tgz']
     }
 ]
 
+_contents = {}
+
 
 # Tests data initialization
 def _init_tests_data():
     # Load git repositories from archives
     loader = GitLoaderFromArchive(config=_TEST_LOADER_CONFIG)
     for origin in _TEST_ORIGINS:
         nb_visits = len(origin['archives'])
         for i, archive in enumerate(origin['archives']):
             origin_repo_archive = \
                 os.path.join(os.path.dirname(__file__),
                              'resources/repos/%s' % archive)
             loader.load(origin['url'], origin_repo_archive, None)
             if nb_visits > 1 and i != nb_visits - 1:
                 time.sleep(1)
 
     # Get reference to the memory storage
     storage = loader.storage
 
     contents = set()
     directories = set()
     revisions = set()
     releases = set()
     snapshots = set()
     persons = set()
 
     content_path = {}
 
     # Get all objects loaded into the test archive
     for origin in _TEST_ORIGINS:
         snp = storage.snapshot_get_latest(origin['id'])
         snapshots.add(hash_to_hex(snp['id']))
         for branch_name, branch_data in snp['branches'].items():
             if branch_data['target_type'] == 'revision':
                 revisions.add(branch_data['target'])
             elif branch_data['target_type'] == 'release':
                 release = next(storage.release_get([branch_data['target']]))
                 revisions.add(release['target'])
                 releases.add(hash_to_hex(branch_data['target']))
                 persons.add(release['author']['id'])
 
         for rev_log in storage.revision_shortlog(set(revisions)):
             rev_id = rev_log[0]
             revisions.add(rev_id)
 
         for rev in storage.revision_get(revisions):
             dir_id = rev['directory']
             persons.add(rev['author']['id'])
             persons.add(rev['committer']['id'])
             directories.add(hash_to_hex(dir_id))
             for entry in dir_iterator(storage, dir_id):
                 content_path[entry['sha1']] = '/'.join(
                     [hash_to_hex(dir_id), entry['path'].decode('utf-8')])
                 if entry['type'] == 'file':
                     contents.add(entry['sha1'])
                 elif entry['type'] == 'dir':
                     directories.add(hash_to_hex(entry['target']))
 
     # Get all checksums for each content
     contents_metadata = storage.content_get_metadata(contents)
     contents = []
     for content_metadata in contents_metadata:
         contents.append({
             algo: hash_to_hex(content_metadata[algo])
             for algo in DEFAULT_ALGORITHMS
         })
         path = content_path[content_metadata['sha1']]
         cnt = next(storage.content_get([content_metadata['sha1']]))
         mimetype, encoding = get_mimetype_and_encoding_for_content(cnt['data'])
         content_display_data = prepare_content_for_display(
             cnt['data'], mimetype, path)
         contents[-1]['path'] = path
         contents[-1]['mimetype'] = mimetype
         contents[-1]['encoding'] = encoding
-        contents[-1]['hljs-language'] = content_display_data['language']
+        contents[-1]['hljs_language'] = content_display_data['language']
+        contents[-1]['data'] = content_display_data['content_data']
+        _contents[contents[-1]['sha1']] = contents[-1]
 
     # Create indexer storage instance that will be shared by indexers
     idx_storage = get_indexer_storage('memory', {})
 
     # Instantiate content indexers that will be used in tests
     # and force them to use the memory storages
     indexers = {}
     for idx_name, idx_class in (('mimetype_indexer', _MimetypeIndexer),
                                 ('language_indexer', _LanguageIndexer),
                                 ('license_indexer', _FossologyLicenseIndexer),
                                 ('ctags_indexer', _CtagsIndexer)):
         idx = idx_class()
         idx.storage = storage
         idx.objstorage = storage.objstorage
         idx.idx_storage = idx_storage
         idx.register_tools(idx.config['tools'])
         indexers[idx_name] = idx
 
     # Add the empty directory to the test archive
     empty_dir_id = directory_identifier({'entries': []})
     empty_dir_id_bin = hash_to_bytes(empty_dir_id)
     storage.directory_add([{'id': empty_dir_id_bin, 'entries': []}])
 
     # Return tests data
     return {
         'storage': storage,
         'idx_storage': idx_storage,
         **indexers,
         'origins': _TEST_ORIGINS,
         'contents': contents,
         'directories': list(directories),
         'persons': list(persons),
         'releases': list(releases),
         'revisions': list(map(hash_to_hex, revisions)),
         'snapshots': list(snapshots)
     }
 
 
+def get_content(content_sha1):
+    return _contents.get(content_sha1)
+
+
 _tests_data = None
 
 
 def get_tests_data():
     """
     Initialize tests data and return them in a dict.
     """
     global _tests_data
     if _tests_data is None:
         _tests_data = _init_tests_data()
     return _tests_data
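 
 
 # Usage sketch (illustrative only; the test below is hypothetical and not
 # part of this module): test code is expected to go through get_tests_data()
 # and get_content() rather than loading the archives itself, e.g.:
 #
 #   from swh.web.tests.data import get_content, get_tests_data
 #
 #   def test_loaded_content_is_exposed():
 #       tests_data = get_tests_data()
 #       sha1 = tests_data['contents'][0]['sha1']
 #       assert get_content(sha1)['sha1'] == sha1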
diff --git a/swh/web/tests/strategies.py b/swh/web/tests/strategies.py
index 13fdaca5..6a26da2b 100644
--- a/swh/web/tests/strategies.py
+++ b/swh/web/tests/strategies.py
@@ -1,524 +1,524 @@
 # Copyright (C) 2018-2019  The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU Affero General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import random
 
 from collections import defaultdict
 from datetime import datetime
 
 from hypothesis import settings, assume
 from hypothesis.strategies import (
     just, sampled_from, lists, composite, datetimes,
     integers, binary, text, characters
 )
 
 from swh.model.hashutil import hash_to_hex, hash_to_bytes
 from swh.model.identifiers import directory_identifier
 from swh.storage.algos.revisions_walker import get_revisions_walker
 from swh.storage.tests.algos.test_snapshot import ( # noqa
     origins as new_origin_strategy, snapshots as new_snapshot
 )
 from swh.web.tests.data import get_tests_data
 
 # Module dedicated to the generation of input data for tests through
 # the use of hypothesis.
 # Some of these data are sampled from a test archive created and populated
 # in the swh.web.tests.data module.
 
 # Set the swh-web hypothesis profile if none has been explicitly set
 hypothesis_default_settings = settings.get_profile('default')
 if repr(settings()) == repr(hypothesis_default_settings):
     settings.load_profile('swh-web')
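 
 # The 'swh-web' profile is assumed to be registered elsewhere before this
 # module is imported; a minimal registration sketch (values illustrative):
 #
 #   from hypothesis import settings
 #   settings.register_profile('swh-web', max_examples=5, deadline=None)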
 
 # Import tests data
 tests_data = get_tests_data()
 storage = tests_data['storage']
 
 
 # The following strategies exploit the hypothesis capabilities
 
 
 def _known_swh_object(object_type):
     return sampled_from(tests_data[object_type])
 
 
 def sha1():
     """
     Hypothesis strategy returning a valid hexadecimal sha1 value.
     """
     return binary(
         min_size=20, max_size=20).filter(
             lambda s: int.from_bytes(s, byteorder='little')).map(hash_to_hex)
 
 
 def invalid_sha1():
     """
     Hypothesis strategy returning an invalid sha1 representation.
     """
     return binary(
         min_size=50, max_size=50).filter(
             lambda s: int.from_bytes(s, byteorder='little')).map(hash_to_hex)
 
 
 def sha256():
     """
     Hypothesis strategy returning a valid hexadecimal sha256 value.
     """
     return binary(
         min_size=32, max_size=32).filter(
             lambda s: int.from_bytes(s, byteorder='little')).map(hash_to_hex)
 
 
 def content():
     """
     Hypothesis strategy returning a random content ingested
     into the test archive.
     """
     return _known_swh_object('contents')
 
 
 def contents():
     """
     Hypothesis strategy returning random contents ingested
     into the test archive.
     """
     return lists(content(), min_size=2, max_size=8)
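 
 
 # Usage sketch (hypothetical test, shown for illustration only): the
 # strategies defined in this module are meant to feed tests through
 # hypothesis' @given decorator, e.g.:
 #
 #   from hypothesis import given
 #
 #   @given(content())
 #   def test_content_metadata(content):
 #       assert 'sha1' in content and 'mimetype' in content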
 
 
 def content_text():
     """
     Hypothesis strategy returning random textual contents ingested
     into the test archive.
     """
     return content().filter(lambda c: c['mimetype'].startswith('text/'))
 
 
 def content_text_non_utf8():
     """
     Hypothesis strategy returning random textual contents, ingested into
     the test archive, that are not encoded in UTF-8.
     """
     return content().filter(lambda c: c['mimetype'].startswith('text/') and
                             c['encoding'] not in ('utf-8', 'us-ascii'))
 
 
 def content_text_no_highlight():
     """
     Hypothesis strategy returning random textual contents, ingested into
     the test archive, with no detected programming language to highlight.
     """
     return content().filter(lambda c: c['mimetype'].startswith('text/') and
-                            c['hljs-language'] == 'nohighlight')
+                            c['hljs_language'] == 'nohighlight')
 
 
 def content_image_type():
     """
     Hypothesis strategy returning random image contents ingested
     into the test archive.
     """
     return content().filter(lambda c: c['mimetype'].startswith('image/'))
 
 
 @composite
 def new_content(draw):
     blake2s256_hex = draw(sha256())
     sha1_hex = draw(sha1())
     sha1_git_hex = draw(sha1())
     sha256_hex = draw(sha256())
 
     assume(sha1_hex != sha1_git_hex)
     assume(blake2s256_hex != sha256_hex)
 
     return {
         'blake2s256': blake2s256_hex,
         'sha1': sha1_hex,
         'sha1_git': sha1_git_hex,
         'sha256': sha256_hex
     }
 
 
 def unknown_content():
     """
     Hypothesis strategy returning a random content not ingested
     into the test archive.
     """
     return new_content().filter(
         lambda c: next(storage.content_get(
             [hash_to_bytes(c['sha1'])])) is None)
 
 
 def unknown_contents():
     """
     Hypothesis strategy returning random contents not ingested
     into the test archive.
     """
     return lists(unknown_content(), min_size=2, max_size=8)
 
 
 def directory():
     """
     Hypothesis strategy returning a random directory ingested
     into the test archive.
     """
     return _known_swh_object('directories')
 
 
 def directory_with_subdirs():
     """
     Hypothesis strategy returning a random directory containing
     sub directories ingested into the test archive.
     """
     return directory().filter(
         lambda d: any([e['type'] == 'dir'
                       for e in list(storage.directory_ls(hash_to_bytes(d)))]))
 
 
 def empty_directory():
     """
     Hypothesis strategy returning the empty directory ingested
     into the test archive.
     """
     return just(directory_identifier({'entries': []}))
 
 
 def unknown_directory():
     """
     Hypothesis strategy returning a random directory not ingested
     into the test archive.
     """
     return sha1().filter(
         lambda s: len(list(storage.directory_missing([hash_to_bytes(s)]))) > 0)
 
 
 def origin():
     """
     Hypothesis strategy returning a random origin ingested
     into the test archive.
     """
     return _known_swh_object('origins')
 
 
 def origin_with_multiple_visits():
     """
     Hypothesis strategy returning a random origin with multiple visits
     ingested into the test archive.
     """
     ret = []
     for origin in tests_data['origins']:
         visits = list(storage.origin_visit_get(origin['id']))
         if len(visits) > 1:
             ret.append(origin)
     return sampled_from(ret)
 
 
 def origin_with_release():
     """
     Hypothesis strategy returning a random origin, ingested into the test
     archive, whose latest snapshot contains at least one release branch.
     """
     ret = []
     for origin in tests_data['origins']:
         snapshot = storage.snapshot_get_latest(origin['id'])
         if any([b['target_type'] == 'release'
                 for b in snapshot['branches'].values()]):
             ret.append(origin)
     return sampled_from(ret)
 
 
 def unknown_origin_id():
     """
     Hypothesis strategy returning a random origin id not ingested
     into the test archive.
     """
     return integers(min_value=1000000)
 
 
 def new_origin():
     """
     Hypothesis strategy returning a random origin not ingested
     into the test archive.
     """
     return new_origin_strategy().filter(
         lambda origin: storage.origin_get(origin) is None)
 
 
 def new_origins(nb_origins=None):
     """
     Hypothesis strategy returning random origins not ingested
     into the test archive.
     """
     min_size = nb_origins if nb_origins is not None else 2
     max_size = nb_origins if nb_origins is not None else 8
     size = random.randint(min_size, max_size)
     return lists(new_origin(), min_size=size, max_size=size,
                  unique_by=lambda o: tuple(sorted(o.items())))
 
 
 def visit_dates(nb_dates=None):
     """
     Hypothesis strategy returning a list of visit dates.
     """
     min_size = nb_dates if nb_dates else 2
     max_size = nb_dates if nb_dates else 8
     return lists(datetimes(min_value=datetime(2015, 1, 1, 0, 0),
                            max_value=datetime(2018, 12, 31, 0, 0)),
                  min_size=min_size, max_size=max_size, unique=True).map(sorted)
 
 
 def release():
     """
     Hypothesis strategy returning a random release ingested
     into the test archive.
     """
     return _known_swh_object('releases')
 
 
 def unknown_release():
     """
     Hypothesis strategy returning a random release not ingested
     into the test archive.
     """
     return sha1().filter(
         lambda s: next(storage.release_get([s])) is None)
 
 
 def revision():
     """
     Hypothesis strategy returning a random revision ingested
     into the test archive.
     """
     return _known_swh_object('revisions')
 
 
 def unknown_revision():
     """
     Hypothesis strategy returning a random revision not ingested
     into the test archive.
     """
     return sha1().filter(
         lambda s: next(storage.revision_get([hash_to_bytes(s)])) is None)
 
 
 @composite
 def new_person(draw):
     """
     Hypothesis strategy returning random raw swh person data.
     """
     name = draw(text(min_size=5, max_size=30,
                      alphabet=characters(min_codepoint=0, max_codepoint=255)))
     email = '%s@company.org' % name
     return {
         'name': name.encode(),
         'email': email.encode(),
         'fullname': ('%s <%s>' % (name, email)).encode()
     }
 
 
 @composite
 def new_swh_date(draw):
     """
     Hypothesis strategy returning random raw swh date data.
     """
     timestamp = draw(
         datetimes(min_value=datetime(2015, 1, 1, 0, 0),
                   max_value=datetime(2018, 12, 31, 0, 0)).map(
                       lambda d: int(d.timestamp())))
     return {
         'timestamp': timestamp,
         'offset': 0,
         'negative_utc': False,
     }
 
 
 @composite
 def new_revision(draw):
     """
     Hypothesis strategy returning random raw swh revision data
     not ingested into the test archive.
     """
     return {
         'id': draw(unknown_revision().map(hash_to_bytes)),
         'directory': draw(sha1().map(hash_to_bytes)),
         'author': draw(new_person()),
         'committer': draw(new_person()),
         'message': draw(
             text(min_size=20, max_size=100).map(lambda t: t.encode())),
         'date': draw(new_swh_date()),
         'committer_date': draw(new_swh_date()),
         'synthetic': False,
         'type': 'git',
         'parents': [],
         'metadata': [],
     }
 
 
 def revisions():
     """
     Hypothesis strategy returning random revisions ingested
     into the test archive.
     """
     return lists(revision(), min_size=2, max_size=8)
 
 
 def unknown_revisions():
     """
     Hypothesis strategy returning random revisions not ingested
     into the test archive.
     """
     return lists(unknown_revision(), min_size=2, max_size=8)
 
 
 def snapshot():
     """
     Hypothesis strategy returning a random snapshot ingested
     into the test archive.
     """
     return _known_swh_object('snapshots')
 
 
 def new_snapshots(nb_snapshots=None):
     min_size = nb_snapshots if nb_snapshots else 2
     max_size = nb_snapshots if nb_snapshots else 8
     return lists(new_snapshot(min_size=2, max_size=10, only_objects=True),
                  min_size=min_size, max_size=max_size)
 
 
 def unknown_snapshot():
     """
     Hypothesis strategy returning a random snapshot not ingested
     into the test archive.
     """
     return sha1().filter(
         lambda s: storage.snapshot_get(hash_to_bytes(s)) is None)
 
 
 def person():
     """
     Hypothesis strategy returning a random person ingested
     into the test archive.
     """
     return _known_swh_object('persons')
 
 
 def unknown_person():
     """
     Hypothesis strategy returning a random person not ingested
     into the test archive.
     """
     return integers(min_value=1000000)
 
 
 def _get_origin_dfs_revisions_walker():
     origin = random.choice(tests_data['origins'][:-1])
     snapshot = storage.snapshot_get_latest(origin['id'])
     head = snapshot['branches'][b'HEAD']['target']
     return get_revisions_walker('dfs', storage, head)
 
 
 def ancestor_revisions():
     """
     Hypothesis strategy returning a pair of revisions ingested into the
     test archive with an ancestor relation.
     """
     # get a dfs revisions walker for one of the origins
     # loaded into the test archive
     revisions_walker = _get_origin_dfs_revisions_walker()
     master_revisions = []
     children = defaultdict(list)
     init_rev_found = False
     # get revisions only authored in the master branch
     for rev in revisions_walker:
         for rev_p in rev['parents']:
             children[rev_p].append(rev['id'])
         if not init_rev_found:
             master_revisions.append(rev)
         if not rev['parents']:
             init_rev_found = True
 
     # head revision
     root_rev = master_revisions[0]
     # pick a random revision, different from head, only authored
     # in the master branch
     ancestor_rev_idx = random.choice(list(range(1, len(master_revisions)-1)))
     ancestor_rev = master_revisions[ancestor_rev_idx]
     ancestor_child_revs = children[ancestor_rev['id']]
 
     return just({
         'sha1_git_root': hash_to_hex(root_rev['id']),
         'sha1_git': hash_to_hex(ancestor_rev['id']),
         'children': [hash_to_hex(r) for r in ancestor_child_revs]
     })
 
 
 def non_ancestor_revisions():
     """
     Hypothesis strategy returning a pair of revisions ingested into the
     test archive with no ancestor relation.
     """
     # get a dfs revisions walker for one of the origins
     # loaded into the test archive
     revisions_walker = _get_origin_dfs_revisions_walker()
     merge_revs = []
     children = defaultdict(list)
     # get all merge revisions
     for rev in revisions_walker:
         if len(rev['parents']) > 1:
             merge_revs.append(rev)
         for rev_p in rev['parents']:
             children[rev_p].append(rev['id'])
     # find a merge revision whose parents each have a single child revision
     random.shuffle(merge_revs)
     selected_revs = None
     for merge_rev in merge_revs:
         if all(len(children[rev_p]) == 1
                for rev_p in merge_rev['parents']):
             selected_revs = merge_rev['parents']
 
     return just({
         'sha1_git_root': hash_to_hex(selected_revs[0]),
         'sha1_git': hash_to_hex(selected_revs[1])
     })
 
 # The following strategies return data specific to some tests
 # that cannot be generated and thus are hardcoded.
 
 
 def contents_with_ctags():
     """
     Hypothesis strategy returning contents ingested into the test
     archive. Those contents are ctags compatible, that is, running
     ctags on them produces results.
     """
     return just({
         'sha1s': ['0ab37c02043ebff946c1937523f60aadd0844351',
                   '15554cf7608dde6bfefac7e3d525596343a85b6f',
                   '2ce837f1489bdfb8faf3ebcc7e72421b5bea83bd',
                   '30acd0b47fc25e159e27a980102ddb1c4bea0b95',
                   '4f81f05aaea3efb981f9d90144f746d6b682285b',
                   '5153aa4b6e4455a62525bc4de38ed0ff6e7dd682',
                   '59d08bafa6a749110dfb65ba43a61963d5a5bf9f',
                   '7568285b2d7f31ae483ae71617bd3db873deaa2c',
                   '7ed3ee8e94ac52ba983dd7690bdc9ab7618247b4',
                   '8ed7ef2e7ff9ed845e10259d08e4145f1b3b5b03',
                   '9b3557f1ab4111c8607a4f2ea3c1e53c6992916c',
                   '9c20da07ed14dc4fcd3ca2b055af99b2598d8bdd',
                   'c20ceebd6ec6f7a19b5c3aebc512a12fbdc9234b',
                   'e89e55a12def4cd54d5bff58378a3b5119878eb7',
                   'e8c0654fe2d75ecd7e0b01bee8a8fc60a130097e',
                   'eb6595e559a1d34a2b41e8d4835e0e4f98a5d2b5'],
         'symbol_name': 'ABS'
     })
 
 
 def revision_with_submodules():
     """
     Hypothesis strategy returning a revision that is known to
     point to a directory with revision entries (aka git submodules).
     """
     return just({
         'rev_sha1_git': 'ffcb69001f3f6745dfd5b48f72ab6addb560e234',
         'rev_dir_sha1_git': 'd92a21446387fa28410e5a74379c934298f39ae2',
         'rev_dir_rev_path': 'libtess2'
     })